diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei16.c index 39c884184b1d94..5cf588fc209160 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_th_vloxei16_v_f16m1(const _Float16 *base, vuint16m1_t bindex, } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f16.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m2_t test_th_vloxei16_v_f16m2(const _Float16 *base, vuint16m2_t bindex, } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f16.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m4_t test_th_vloxei16_v_f16m4(const _Float16 *base, vuint16m4_t bindex, } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32f16.nxv32i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m8_t test_th_vloxei16_v_f16m8(const _Float16 *base, vuint16m8_t bindex, } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f32.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m2_t test_th_vloxei16_v_f32m2(const float *base, vuint16m1_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f32.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat32m4_t test_th_vloxei16_v_f32m4(const float *base, 
vuint16m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f32.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat32m8_t test_th_vloxei16_v_f32m8(const float *base, vuint16m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f64.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat64m4_t test_th_vloxei16_v_f64m4(const double *base, vuint16m1_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f64.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m8_t test_th_vloxei16_v_f64m8(const double *base, vuint16m2_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint8m1_t test_th_vloxei16_v_i8m1(const int8_t *base, vuint16m2_t bindex, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint8m2_t test_th_vloxei16_v_i8m2(const int8_t *base, vuint16m4_t bindex, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i8.nxv32i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint8m4_t test_th_vloxei16_v_i8m4(const int8_t *base, vuint16m8_t bindex, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m1_t test_th_vloxei16_v_i16m1(const int16_t *base, vuint16m1_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m2_t test_th_vloxei16_v_i16m2(const int16_t *base, vuint16m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m4_t test_th_vloxei16_v_i16m4(const int16_t *base, vuint16m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_th_vloxei16_v_i16m8(const int16_t *base, vuint16m8_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m2_t test_th_vloxei16_v_i32m2(const int32_t *base, vuint16m1_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m4_t test_th_vloxei16_v_i32m4(const int32_t *base, vuint16m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m8_t test_th_vloxei16_v_i32m8(const int32_t *base, vuint16m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint64m4_t test_th_vloxei16_v_i64m4(const int64_t *base, vuint16m1_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint64m8_t test_th_vloxei16_v_i64m8(const int64_t *base, vuint16m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint8m1_t test_th_vloxei16_v_u8m1(const uint8_t *base, vuint16m2_t bindex, size } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint8m2_t test_th_vloxei16_v_u8m2(const uint8_t *base, vuint16m4_t bindex, size } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i8.nxv32i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint8m4_t test_th_vloxei16_v_u8m4(const uint8_t *base, vuint16m8_t bindex, size } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint16m1_t 
test_th_vloxei16_v_u16m1(const uint16_t *base, vuint16m1_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint16m2_t test_th_vloxei16_v_u16m2(const uint16_t *base, vuint16m2_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint16m4_t test_th_vloxei16_v_u16m4(const uint16_t *base, vuint16m4_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint16m8_t test_th_vloxei16_v_u16m8(const uint16_t *base, vuint16m8_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint32m2_t test_th_vloxei16_v_u32m2(const uint32_t *base, vuint16m1_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint32m4_t test_th_vloxei16_v_u32m4(const uint32_t *base, vuint16m2_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint32m8_t test_th_vloxei16_v_u32m8(const uint32_t *base, vuint16m4_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m4_t test_th_vloxei16_v_u64m4(const uint64_t *base, vuint16m1_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei32.c index 4abce254c22589..c63d50b5683b18 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_th_vloxei32_v_f16m1(const _Float16 *base, vuint32m2_t bindex, } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f16.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m2_t test_th_vloxei32_v_f16m2(const _Float16 *base, vuint32m4_t bindex, } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f16.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m4_t test_th_vloxei32_v_f16m4(const _Float16 *base, vuint32m8_t bindex, } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2f32.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m1_t test_th_vloxei32_v_f32m1(const float *base, vuint32m1_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f32.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m2_t test_th_vloxei32_v_f32m2(const float *base, vuint32m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f32.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat32m4_t test_th_vloxei32_v_f32m4(const float *base, vuint32m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f32.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat32m8_t test_th_vloxei32_v_f32m8(const float *base, vuint32m8_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2f64.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat64m2_t test_th_vloxei32_v_f64m2(const double *base, vuint32m1_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f64.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m4_t test_th_vloxei32_v_f64m4(const double *base, vuint32m2_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f64.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat64m8_t test_th_vloxei32_v_f64m8(const double *base, vuint32m4_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlxe.nxv8i8.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint8m1_t test_th_vloxei32_v_i8m1(const int8_t *base, vuint32m4_t bindex, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint8m2_t test_th_vloxei32_v_i8m2(const int8_t *base, vuint32m8_t bindex, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m1_t test_th_vloxei32_v_i16m1(const int16_t *base, vuint32m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m2_t test_th_vloxei32_v_i16m2(const int16_t *base, vuint32m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m4_t test_th_vloxei32_v_i16m4(const int16_t *base, vuint32m8_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint32m1_t test_th_vloxei32_v_i32m1(const int32_t *base, vuint32m1_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m2_t 
test_th_vloxei32_v_i32m2(const int32_t *base, vuint32m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m4_t test_th_vloxei32_v_i32m4(const int32_t *base, vuint32m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m8_t test_th_vloxei32_v_i32m8(const int32_t *base, vuint32m8_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i64.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint64m2_t test_th_vloxei32_v_i64m2(const int64_t *base, vuint32m1_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint64m4_t test_th_vloxei32_v_i64m4(const int64_t *base, vuint32m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint64m8_t test_th_vloxei32_v_i64m8(const int64_t *base, vuint32m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint8m1_t test_th_vloxei32_v_u8m1(const uint8_t *base, vuint32m4_t bindex, size } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint8m2_t test_th_vloxei32_v_u8m2(const uint8_t *base, vuint32m8_t bindex, size } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint16m1_t test_th_vloxei32_v_u16m1(const uint16_t *base, vuint32m2_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint16m2_t test_th_vloxei32_v_u16m2(const uint16_t *base, vuint32m4_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint16m4_t test_th_vloxei32_v_u16m4(const uint16_t *base, vuint32m8_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_th_vloxei32_v_u32m1(const uint32_t *base, vuint32m1_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint32m2_t test_th_vloxei32_v_u32m2(const uint32_t *base, vuint32m2_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] 
{ // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint32m4_t test_th_vloxei32_v_u32m4(const uint32_t *base, vuint32m4_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint32m8_t test_th_vloxei32_v_u32m8(const uint32_t *base, vuint32m8_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i64.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m2_t test_th_vloxei32_v_u64m2(const uint64_t *base, vuint32m1_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint64m4_t test_th_vloxei32_v_u64m4(const uint64_t *base, vuint32m2_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei64.c index 16a2171eda0da5..bc861141e0b085 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_th_vloxei64_v_f16m1(const _Float16 *base, vuint64m4_t bindex, } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlxe.nxv8f16.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m2_t test_th_vloxei64_v_f16m2(const _Float16 *base, vuint64m8_t bindex, } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2f32.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat32m1_t test_th_vloxei64_v_f32m1(const float *base, vuint64m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f32.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m2_t test_th_vloxei64_v_f32m2(const float *base, vuint64m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f32.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m4_t test_th_vloxei64_v_f32m4(const float *base, vuint64m8_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv1f64.nxv1i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat64m1_t test_th_vloxei64_v_f64m1(const double *base, vuint64m1_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2f64.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat64m2_t test_th_vloxei64_v_f64m2(const double *base, vuint64m2_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f64.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat64m4_t test_th_vloxei64_v_f64m4(const double 
*base, vuint64m4_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f64.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m8_t test_th_vloxei64_v_f64m8(const double *base, vuint64m8_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint8m1_t test_th_vloxei64_v_i8m1(const int8_t *base, vuint64m8_t bindex, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m1_t test_th_vloxei64_v_i16m1(const int16_t *base, vuint64m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_th_vloxei64_v_i16m2(const int16_t *base, vuint64m8_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i32.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint32m1_t test_th_vloxei64_v_i32m1(const int32_t *base, vuint64m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint32m2_t test_th_vloxei64_v_i32m2(const int32_t *base, vuint64m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint32m4_t test_th_vloxei64_v_i32m4(const int32_t *base, vuint64m8_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_th_vloxei64_v_i64m1(const int64_t *base, vuint64m1_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint64m2_t test_th_vloxei64_v_i64m2(const int64_t *base, vuint64m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint64m4_t test_th_vloxei64_v_i64m4(const int64_t *base, vuint64m4_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint64m8_t test_th_vloxei64_v_i64m8(const int64_t *base, vuint64m8_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_th_vloxei64_v_u8m1(const uint8_t *base, vuint64m8_t bindex, size } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_th_vloxei64_v_u16m1(const uint16_t *base, vuint64m4_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m2_t test_th_vloxei64_v_u16m2(const uint16_t *base, vuint64m8_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i32.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m1_t test_th_vloxei64_v_u32m1(const uint32_t *base, vuint64m2_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m2_t test_th_vloxei64_v_u32m2(const uint32_t *base, vuint64m4_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m4_t test_th_vloxei64_v_u32m4(const uint32_t *base, vuint64m8_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_th_vloxei64_v_u64m1(const uint64_t *base, vuint64m1_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ 
vuint64m2_t test_th_vloxei64_v_u64m2(const uint64_t *base, vuint64m2_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m4_t test_th_vloxei64_v_u64m4(const uint64_t *base, vuint64m4_t bindex, s } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei8.c index 68e3239f2fcef0..27d6b76cb117ec 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m2_t test_th_vloxei8_v_f16m2(const _Float16 *base, vuint8m1_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f16.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m4_t test_th_vloxei8_v_f16m4(const _Float16 *base, vuint8m2_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32f16.nxv32i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m8_t test_th_vloxei8_v_f16m8(const _Float16 *base, vuint8m4_t bindex, si } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f32.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m4_t test_th_vloxei8_v_f32m4(const float *base, vuint8m1_t bindex, size_ } // 
CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f32.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m8_t test_th_vloxei8_v_f32m8(const float *base, vuint8m2_t bindex, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f64.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat64m8_t test_th_vloxei8_v_f64m8(const double *base, vuint8m1_t bindex, size } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m1_t test_th_vloxei8_v_i8m1(const int8_t *base, vuint8m1_t bindex, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m2_t test_th_vloxei8_v_i8m2(const int8_t *base, vuint8m2_t bindex, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint8m4_t test_th_vloxei8_v_i8m4(const int8_t *base, vuint8m4_t bindex, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint8m8_t test_th_vloxei8_v_i8m8(const int8_t *base, vuint8m8_t bindex, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_th_vloxei8_v_i16m2(const int16_t *base, vuint8m1_t bindex, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m4_t test_th_vloxei8_v_i16m4(const int16_t *base, vuint8m2_t bindex, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i16.nxv32i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m8_t test_th_vloxei8_v_i16m8(const int16_t *base, vuint8m4_t bindex, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint32m4_t test_th_vloxei8_v_i32m4(const int32_t *base, vuint8m1_t bindex, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint32m8_t test_th_vloxei8_v_i32m8(const int32_t *base, vuint8m2_t bindex, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m8_t test_th_vloxei8_v_i64m8(const int64_t *base, vuint8m1_t bindex, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i8.i64( 
poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_th_vloxei8_v_u8m1(const uint8_t *base, vuint8m1_t bindex, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m2_t test_th_vloxei8_v_u8m2(const uint8_t *base, vuint8m2_t bindex, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m4_t test_th_vloxei8_v_u8m4(const uint8_t *base, vuint8m4_t bindex, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m8_t test_th_vloxei8_v_u8m8(const uint8_t *base, vuint8m8_t bindex, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m2_t test_th_vloxei8_v_u16m2(const uint16_t *base, vuint8m1_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m4_t test_th_vloxei8_v_u16m4(const uint16_t *base, vuint8m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i16.nxv32i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m8_t test_th_vloxei8_v_u16m8(const uint16_t *base, vuint8m4_t bindex, siz } // 
CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m4_t test_th_vloxei8_v_u32m4(const uint32_t *base, vuint8m1_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m8_t test_th_vloxei8_v_u32m8(const uint32_t *base, vuint8m2_t bindex, siz } // CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vlx.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vlx.c index 2ade79a34d2cc9..809950fb61f858 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vlx.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vlx.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlxb_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlxb_v_i8m2(const int8_t *base, vuint8m2_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlxb_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlxb_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlxb_v_i16m1(const int16_t *base, vuint16m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlxb_v_i16m2(const int16_t *base, vuint16m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlxb_v_i16m4(const int16_t *base, vuint16m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlxb_v_i16m8(const int16_t *base, vuint16m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlxb_v_i32m1(const int32_t *base, vuint32m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlxb.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlxb_v_i32m2(const int32_t *base, vuint32m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlxb_v_i32m4(const int32_t *base, vuint32m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlxb_v_i32m8(const int32_t *base, vuint32m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlxb_v_i64m1(const int64_t *base, vuint64m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlxb_v_i64m2(const int64_t *base, vuint64m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlxb_v_i64m4(const int64_t *base, vuint64m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m8_t test_th_vlxb_v_i64m8(const int64_t *base, vuint64m8_t index, 
size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint8m1_t test_th_vlxh_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint8m2_t test_th_vlxh_v_i8m2(const int8_t *base, vuint8m2_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint8m4_t test_th_vlxh_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint8m8_t test_th_vlxh_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint16m1_t test_th_vlxh_v_i16m1(const int16_t *base, vuint16m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint16m2_t test_th_vlxh_v_i16m2(const int16_t *base, vuint16m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint16m4_t test_th_vlxh_v_i16m4(const int16_t *base, vuint16m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint16m8_t test_th_vlxh_v_i16m8(const int16_t *base, vuint16m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m1_t test_th_vlxh_v_i32m1(const int32_t *base, vuint32m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m2_t test_th_vlxh_v_i32m2(const int32_t *base, vuint32m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint32m4_t test_th_vlxh_v_i32m4(const int32_t *base, vuint32m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint32m8_t test_th_vlxh_v_i32m8(const int32_t *base, vuint32m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m1_t test_th_vlxh_v_i64m1(const int64_t *base, vuint64m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m2_t test_th_vlxh_v_i64m2(const int64_t *base, vuint64m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m4_t test_th_vlxh_v_i64m4(const int64_t *base, vuint64m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_th_vlxh_v_i64m8(const int64_t *base, vuint64m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_th_vlxw_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m2_t test_th_vlxw_v_i8m2(const int8_t *base, vuint8m2_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m4_t test_th_vlxw_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m8 -// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m8_t test_th_vlxw_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint16m1_t test_th_vlxw_v_i16m1(const int16_t *base, vuint16m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint16m2_t test_th_vlxw_v_i16m2(const int16_t *base, vuint16m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint16m4_t test_th_vlxw_v_i16m4(const int16_t *base, vuint16m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint16m8_t test_th_vlxw_v_i16m8(const int16_t *base, vuint16m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint32m1_t test_th_vlxw_v_i32m1(const int32_t *base, vuint32m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint32m2_t test_th_vlxw_v_i32m2(const int32_t *base, vuint32m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint32m4_t test_th_vlxw_v_i32m4(const int32_t *base, vuint32m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint32m8_t test_th_vlxw_v_i32m8(const int32_t *base, vuint32m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint64m1_t test_th_vlxw_v_i64m1(const int64_t *base, vuint64m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint64m2_t test_th_vlxw_v_i64m2(const int64_t *base, vuint64m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint64m4_t test_th_vlxw_v_i64m4(const int64_t *base, vuint64m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint64m8_t test_th_vlxw_v_i64m8(const 
int64_t *base, vuint64m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint8m1_t test_th_vlxbu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint8m2_t test_th_vlxbu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint8m4_t test_th_vlxbu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint8m8_t test_th_vlxbu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint16m1_t test_th_vlxbu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint16m2_t test_th_vlxbu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint16m4_t test_th_vlxbu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint16m8_t test_th_vlxbu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint32m1_t test_th_vlxbu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint32m2_t test_th_vlxbu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint32m4_t test_th_vlxbu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint32m8_t test_th_vlxbu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlxbu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m1_t test_th_vlxbu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m2_t test_th_vlxbu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m4_t test_th_vlxbu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vuint64m8_t test_th_vlxbu_v_u64m8(const uint64_t *base, vuint64m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vuint8m1_t test_th_vlxhu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vuint8m2_t test_th_vlxhu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vuint8m4_t test_th_vlxhu_v_u8m4(const uint8_t *base, vuint8m4_t index, 
size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -686,7 +687,7 @@ vuint8m8_t test_th_vlxhu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vuint16m1_t test_th_vlxhu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vuint16m2_t test_th_vlxhu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vuint16m4_t test_th_vlxhu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vuint16m8_t test_th_vlxhu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vuint32m1_t test_th_vlxhu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vuint32m2_t test_th_vlxhu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 +757,7 @@ vuint32m4_t test_th_vlxhu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vuint32m8_t test_th_vlxhu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vuint64m1_t test_th_vlxhu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vuint64m2_t test_th_vlxhu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vuint64m4_t test_th_vlxhu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i64.nxv8i64.i64( poison, ptr 
[[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vuint64m8_t test_th_vlxhu_v_u64m8(const uint64_t *base, vuint64m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vuint8m1_t test_th_vlxwu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vuint8m2_t test_th_vlxwu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vuint8m4_t test_th_vlxwu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ vuint8m8_t test_th_vlxwu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vuint16m1_t test_th_vlxwu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -866,7 +867,7 @@ vuint16m2_t test_th_vlxwu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local 
@test_th_vlxwu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -876,7 +877,7 @@ vuint16m4_t test_th_vlxwu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -886,7 +887,7 @@ vuint16m8_t test_th_vlxwu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -896,7 +897,7 @@ vuint32m1_t test_th_vlxwu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -906,7 +907,7 @@ vuint32m2_t test_th_vlxwu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -916,7 +917,7 @@ vuint32m4_t test_th_vlxwu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -926,7 +927,7 @@ vuint32m8_t test_th_vlxwu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -936,7 +937,7 @@ vuint64m1_t test_th_vlxwu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -946,7 +947,7 @@ vuint64m2_t test_th_vlxwu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -956,7 +957,7 @@ vuint64m4_t test_th_vlxwu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei16.c index 9ee382660f5983..e12dde5f22647e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsoxei16_v_f16m1(_Float16 *base, vuint16m1_t bindex, vfloat16m1_t v } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsoxei16_v_f16m2(_Float16 *base, vuint16m2_t bindex, vfloat16m2_t v } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
// CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsoxei16_v_f16m4(_Float16 *base, vuint16m4_t bindex, vfloat16m4_t v } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32f16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsoxei16_v_f16m8(_Float16 *base, vuint16m8_t bindex, vfloat16m8_t v } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f32.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsoxei16_v_f32m2(float *base, vuint16m1_t bindex, vfloat32m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f32.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsoxei16_v_f32m4(float *base, vuint16m2_t bindex, vfloat32m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f32.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsoxei16_v_f32m8(float *base, vuint16m4_t bindex, vfloat32m8_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f64.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsoxei16_v_f64m4(double *base, vuint16m1_t bindex, vfloat64m4_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f64.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsoxei16_v_f64m8(double *base, vuint16m2_t bindex, vfloat64m8_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsoxei16_v_i8m1(int8_t *base, vuint16m2_t bindex, vint8m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i8.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsoxei16_v_i8m2(int8_t *base, vuint16m4_t bindex, vint8m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i8.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsoxei16_v_i8m4(int8_t *base, vuint16m8_t bindex, vint8m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsoxei16_v_i16m1(int16_t *base, vuint16m1_t bindex, vint16m1_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsoxei16_v_i16m2(int16_t *base, vuint16m2_t bindex, vint16m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsoxei16_v_i16m4(int16_t *base, vuint16m4_t bindex, vint16m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsoxei16_v_i16m8(int16_t *base, vuint16m8_t bindex, vint16m8_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsoxei16_v_i32m2(int32_t *base, vuint16m1_t bindex, vint32m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsoxei16_v_i32m4(int32_t *base, vuint16m2_t bindex, vint32m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsoxei16_v_i32m8(int32_t *base, vuint16m4_t bindex, vint32m8_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsoxei16_v_i64m4(int64_t *base, vuint16m1_t bindex, vint64m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv8i64.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsoxei16_v_i64m8(int64_t *base, vuint16m2_t bindex, vint64m8_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsoxei16_v_u8m1(uint8_t *base, vuint16m2_t bindex, vuint8m1_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i8.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsoxei16_v_u8m2(uint8_t *base, vuint16m4_t bindex, vuint8m2_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i8.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsoxei16_v_u8m4(uint8_t *base, vuint16m8_t bindex, vuint8m4_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsoxei16_v_u16m1(uint16_t *base, vuint16m1_t bindex, vuint16m1_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsoxei16_v_u16m2(uint16_t *base, vuint16m2_t bindex, vuint16m2_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsoxei16_v_u16m4(uint16_t *base, vuint16m4_t bindex, vuint16m4_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsoxei16_v_u16m8(uint16_t *base, vuint16m8_t bindex, vuint16m8_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vsoxei16_v_u32m2(uint32_t *base, vuint16m1_t bindex, vuint32m2_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vsoxei16_v_u32m4(uint32_t *base, vuint16m2_t bindex, vuint32m4_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vsoxei16_v_u32m8(uint32_t *base, vuint16m4_t bindex, vuint32m8_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -326,7 +327,7 @@ void test_th_vsoxei16_v_u64m4(uint64_t *base, vuint16m1_t bindex, vuint64m4_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv8i64.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei32.c index f702c9eda2d3e1..486671428ee20b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsoxei32_v_f16m1(_Float16 *base, vuint32m2_t bindex, vfloat16m1_t v } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f16.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsoxei32_v_f16m2(_Float16 *base, vuint32m4_t bindex, vfloat16m2_t v } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f16.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsoxei32_v_f16m4(_Float16 *base, vuint32m8_t bindex, vfloat16m4_t v } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2f32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsoxei32_v_f32m1(float *base, vuint32m1_t bindex, vfloat32m1_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsoxei32_v_f32m2(float *base, vuint32m2_t bindex, vfloat32m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsoxei32_v_f32m4(float *base, vuint32m4_t bindex, vfloat32m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsoxei32_v_f32m8(float *base, vuint32m8_t bindex, vfloat32m8_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2f64.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsoxei32_v_f64m2(double *base, vuint32m1_t bindex, vfloat64m2_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f64.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsoxei32_v_f64m4(double *base, vuint32m2_t bindex, vfloat64m4_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f64.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsoxei32_v_f64m8(double *base, vuint32m4_t bindex, vfloat64m8_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsoxei32_v_i8m1(int8_t *base, vuint32m4_t bindex, vint8m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv16i8.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsoxei32_v_i8m2(int8_t *base, vuint32m8_t bindex, vint8m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsoxei32_v_i16m1(int16_t *base, vuint32m2_t bindex, vint16m1_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsoxei32_v_i16m2(int16_t *base, vuint32m4_t bindex, vint16m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i16.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsoxei32_v_i16m4(int16_t *base, vuint32m8_t bindex, vint16m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsoxei32_v_i32m1(int32_t *base, vuint32m1_t bindex, vint32m1_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsoxei32_v_i32m2(int32_t *base, vuint32m2_t bindex, vint32m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsoxei32_v_i32m4(int32_t *base, vuint32m4_t bindex, vint32m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsoxei32_v_i32m8(int32_t *base, vuint32m8_t bindex, vint32m8_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i64.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsoxei32_v_i64m2(int64_t *base, vuint32m1_t bindex, vint64m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsoxei32_v_i64m4(int64_t *base, vuint32m2_t bindex, vint64m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsoxei32_v_i64m8(int64_t *base, vuint32m4_t bindex, vint64m8_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsoxei32_v_u8m1(uint8_t *base, vuint32m4_t bindex, vuint8m1_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv16i8.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsoxei32_v_u8m2(uint8_t *base, vuint32m8_t bindex, vuint8m2_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsoxei32_v_u16m1(uint16_t *base, vuint32m2_t bindex, vuint16m1_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsoxei32_v_u16m2(uint16_t *base, vuint32m4_t bindex, vuint16m2_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i16.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsoxei32_v_u16m4(uint16_t *base, vuint32m8_t bindex, vuint16m4_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsoxei32_v_u32m1(uint32_t *base, vuint32m1_t bindex, vuint32m1_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vsoxei32_v_u32m2(uint32_t *base, vuint32m2_t bindex, vuint32m2_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vsoxei32_v_u32m4(uint32_t *base, vuint32m4_t bindex, vuint32m4_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vsoxei32_v_u32m8(uint32_t *base, vuint32m8_t bindex, vuint32m8_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i64.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -326,7 +327,7 @@ void test_th_vsoxei32_v_u64m2(uint64_t *base, vuint32m1_t bindex, vuint64m2_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -336,7 +337,7 @@ void test_th_vsoxei32_v_u64m4(uint64_t *base, vuint32m2_t bindex, vuint64m4_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei64.c index edf2b4a89428fb..848837cb0d2ee0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsoxei64_v_f16m1(_Float16 *base, vuint64m4_t bindex, vfloat16m1_t v } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f16.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsoxei64_v_f16m2(_Float16 *base, vuint64m8_t bindex, vfloat16m2_t v } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2f32.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsoxei64_v_f32m1(float *base, vuint64m2_t bindex, vfloat32m1_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f32.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsoxei64_v_f32m2(float *base, vuint64m4_t bindex, vfloat32m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f32.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsoxei64_v_f32m4(float *base, vuint64m8_t bindex, vfloat32m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv1f64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsoxei64_v_f64m1(double *base, vuint64m1_t bindex, vfloat64m1_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2f64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsoxei64_v_f64m2(double *base, vuint64m2_t bindex, vfloat64m2_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
call void @llvm.riscv.th.vsxe.nxv4f64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsoxei64_v_f64m4(double *base, vuint64m4_t bindex, vfloat64m4_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsoxei64_v_f64m8(double *base, vuint64m8_t bindex, vfloat64m8_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsoxei64_v_i8m1(int8_t *base, vuint64m8_t bindex, vint8m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsoxei64_v_i16m1(int16_t *base, vuint64m4_t bindex, vint16m1_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsoxei64_v_i16m2(int16_t *base, vuint64m8_t bindex, vint16m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i32.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsoxei64_v_i32m1(int32_t *base, vuint64m2_t bindex, vint32m1_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv4i32.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsoxei64_v_i32m2(int32_t *base, vuint64m4_t bindex, vint32m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsoxei64_v_i32m4(int32_t *base, vuint64m8_t bindex, vint32m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsoxei64_v_i64m1(int64_t *base, vuint64m1_t bindex, vint64m1_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsoxei64_v_i64m2(int64_t *base, vuint64m2_t bindex, vint64m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsoxei64_v_i64m4(int64_t *base, vuint64m4_t bindex, vint64m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsoxei64_v_i64m8(int64_t *base, vuint64m8_t bindex, vint64m8_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv8i8.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsoxei64_v_u8m1(uint8_t *base, vuint64m8_t bindex, vuint8m1_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsoxei64_v_u16m1(uint16_t *base, vuint64m4_t bindex, vuint16m1_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsoxei64_v_u16m2(uint16_t *base, vuint64m8_t bindex, vuint16m2_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i32.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsoxei64_v_u32m1(uint32_t *base, vuint64m2_t bindex, vuint32m1_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsoxei64_v_u32m2(uint32_t *base, vuint64m4_t bindex, vuint32m2_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsoxei64_v_u32m4(uint32_t *base, vuint64m8_t bindex, vuint32m4_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsoxei64_v_u64m1(uint64_t *base, vuint64m1_t bindex, vuint64m1_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsoxei64_v_u64m2(uint64_t *base, vuint64m2_t bindex, vuint64m2_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsoxei64_v_u64m4(uint64_t *base, vuint64m4_t bindex, vuint64m4_t va } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei8.c index b3491202589c88..3d7d501fde51d0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsoxei8_v_f16m2(_Float16 *base, vuint8m1_t bindex, vfloat16m2_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f16.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsoxei8_v_f16m4(_Float16 *base, vuint8m2_t bindex, vfloat16m4_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32f16.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsoxei8_v_f16m8(_Float16 *base, vuint8m4_t bindex, vfloat16m8_t val } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f32.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsoxei8_v_f32m4(float *base, vuint8m1_t bindex, vfloat32m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f32.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsoxei8_v_f32m8(float *base, vuint8m2_t bindex, vfloat32m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f64.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsoxei8_v_f64m8(double *base, vuint8m1_t bindex, vfloat64m8_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsoxei8_v_i8m1(int8_t *base, vuint8m1_t bindex, vint8m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsoxei8_v_i8m2(int8_t *base, vuint8m2_t bindex, vint8m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsoxei8_v_i8m4(int8_t *base, vuint8m4_t bindex, vint8m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsoxei8_v_i8m8(int8_t *base, vuint8m8_t bindex, vint8m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsoxei8_v_i16m2(int16_t *base, vuint8m1_t bindex, vint16m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i16.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsoxei8_v_i16m4(int16_t *base, vuint8m2_t bindex, vint16m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i16.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsoxei8_v_i16m8(int16_t *base, vuint8m4_t bindex, vint16m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsoxei8_v_i32m4(int32_t *base, vuint8m1_t bindex, vint32m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i8.i64( 
[[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsoxei8_v_i32m8(int32_t *base, vuint8m2_t bindex, vint32m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsoxei8_v_i64m8(int64_t *base, vuint8m1_t bindex, vint64m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsoxei8_v_u8m1(uint8_t *base, vuint8m1_t bindex, vuint8m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsoxei8_v_u8m2(uint8_t *base, vuint8m2_t bindex, vuint8m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsoxei8_v_u8m4(uint8_t *base, vuint8m4_t bindex, vuint8m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsoxei8_v_u8m8(uint8_t *base, vuint8m8_t bindex, vuint8m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) 
// CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsoxei8_v_u16m2(uint16_t *base, vuint8m1_t bindex, vuint16m2_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i16.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsoxei8_v_u16m4(uint16_t *base, vuint8m2_t bindex, vuint16m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i16.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsoxei8_v_u16m8(uint16_t *base, vuint8m4_t bindex, vuint16m8_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsoxei8_v_u32m4(uint32_t *base, vuint8m1_t bindex, vuint32m4_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsoxei8_v_u32m8(uint32_t *base, vuint8m2_t bindex, vuint32m8_t valu } // CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsx.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsx.c index 5fc05703e36528..5e433f648db12e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsx.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsx.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone 
-emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsxb_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsxb_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsxb_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsxb_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsxb_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsxb_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void 
test_th_vsxb_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsxb_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsxb_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsxb_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsxb_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsxb_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsxb_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, 
si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsxb_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsxb_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsxb_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsxh_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsxh_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsxh_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m8 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsxh_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsxh_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsxh_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsxh_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsxh_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsxh_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], 
[[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsxh_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsxh_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsxh_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vsxh_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vsxh_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vsxh_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -326,7 +327,7 @@ void test_th_vsxh_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -336,7 +337,7 @@ void test_th_vsxw_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -346,7 +347,7 @@ void test_th_vsxw_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -356,7 +357,7 @@ void test_th_vsxw_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -366,7 +367,7 @@ void test_th_vsxw_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -376,7 +377,7 @@ void test_th_vsxw_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -386,7 +387,7 @@ void test_th_vsxw_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -396,7 +397,7 @@ void test_th_vsxw_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -406,7 +407,7 @@ void test_th_vsxw_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -416,7 +417,7 @@ void test_th_vsxw_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -426,7 +427,7 @@ void test_th_vsxw_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -436,7 +437,7 @@ void test_th_vsxw_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -446,7 +447,7 @@ void test_th_vsxw_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -456,7 +457,7 @@ void test_th_vsxw_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -466,7 +467,7 @@ void test_th_vsxw_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -476,7 +477,7 @@ void test_th_vsxw_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -486,7 +487,7 @@ void test_th_vsxw_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -496,7 +497,7 @@ void test_th_vsxb_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i8.nxv16i8.i64( 
[[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -506,7 +507,7 @@ void test_th_vsxb_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -516,7 +517,7 @@ void test_th_vsxb_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -526,7 +527,7 @@ void test_th_vsxb_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -536,7 +537,7 @@ void test_th_vsxb_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -546,7 +547,7 @@ void test_th_vsxb_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -556,7 +557,7 @@ void test_th_vsxb_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
void @@ -566,7 +567,7 @@ void test_th_vsxb_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -576,7 +577,7 @@ void test_th_vsxb_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -586,7 +587,7 @@ void test_th_vsxb_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -596,7 +597,7 @@ void test_th_vsxb_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -606,7 +607,7 @@ void test_th_vsxb_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -616,7 +617,7 @@ void test_th_vsxb_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -626,7 +627,7 @@ void test_th_vsxb_v_u64m2(uint64_t *base, vuint64m2_t 
index, vuint64m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -636,7 +637,7 @@ void test_th_vsxb_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -646,7 +647,7 @@ void test_th_vsxb_v_u64m8(uint64_t *base, vuint64m8_t index, vuint64m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -656,7 +657,7 @@ void test_th_vsxh_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -666,7 +667,7 @@ void test_th_vsxh_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -676,7 +677,7 @@ void test_th_vsxh_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -686,7 +687,7 @@ void test_th_vsxh_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size } // CHECK-RV64-LABEL: define dso_local void 
@test_th_vsxh_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -696,7 +697,7 @@ void test_th_vsxh_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -706,7 +707,7 @@ void test_th_vsxh_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -716,7 +717,7 @@ void test_th_vsxh_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -726,7 +727,7 @@ void test_th_vsxh_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -736,7 +737,7 @@ void test_th_vsxh_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -746,7 +747,7 @@ void test_th_vsxh_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -756,7 +757,7 @@ void test_th_vsxh_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -766,7 +767,7 @@ void test_th_vsxh_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -776,7 +777,7 @@ void test_th_vsxh_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -786,7 +787,7 @@ void test_th_vsxh_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -796,7 +797,7 @@ void test_th_vsxh_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -806,7 +807,7 @@ void test_th_vsxh_v_u64m8(uint64_t *base, vuint64m8_t index, vuint64m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -816,7 +817,7 @@ void test_th_vsxw_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -826,7 +827,7 @@ void test_th_vsxw_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -836,7 +837,7 @@ void test_th_vsxw_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -846,7 +847,7 @@ void test_th_vsxw_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -856,7 +857,7 @@ void test_th_vsxw_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -866,7 +867,7 @@ void test_th_vsxw_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -876,7 +877,7 @@ void test_th_vsxw_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -886,7 +887,7 @@ void test_th_vsxw_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -896,7 +897,7 @@ void test_th_vsxw_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -906,7 +907,7 @@ void test_th_vsxw_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -916,7 +917,7 @@ void test_th_vsxw_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -926,7 +927,7 @@ void test_th_vsxw_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -936,7 +937,7 @@ void test_th_vsxw_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -946,7 +947,7 @@ void test_th_vsxw_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -956,7 +957,7 @@ void test_th_vsxw_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vlx.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vlx.c index ba277cd78eea7e..b8c33fc0377bfd 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vlx.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vlx.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlxb_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlxb_v_i8m2(const int8_t *base, vuint8m2_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlxb.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlxb_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlxb_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlxb_v_i16m1(const int16_t *base, vuint16m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlxb_v_i16m2(const int16_t *base, vuint16m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlxb_v_i16m4(const int16_t *base, vuint16m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlxb_v_i16m8(const int16_t *base, vuint16m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlxb_v_i32m1(const int32_t *base, vuint32m1_t index, size_t v } // 
CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlxb_v_i32m2(const int32_t *base, vuint32m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlxb_v_i32m4(const int32_t *base, vuint32m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlxb_v_i32m8(const int32_t *base, vuint32m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlxb_v_i64m1(const int64_t *base, vuint64m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlxb_v_i64m2(const int64_t *base, vuint64m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlxb_v_i64m4(const int64_t *base, vuint64m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m8_t test_th_vlxb_v_i64m8(const int64_t *base, vuint64m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint8m1_t test_th_vlxh_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint8m2_t test_th_vlxh_v_i8m2(const int8_t *base, vuint8m2_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint8m4_t test_th_vlxh_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint8m8_t test_th_vlxh_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint16m1_t test_th_vlxh_v_i16m1(const int16_t *base, vuint16m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: 
ret [[TMP0]] @@ -226,7 +227,7 @@ vint16m2_t test_th_vlxh_v_i16m2(const int16_t *base, vuint16m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint16m4_t test_th_vlxh_v_i16m4(const int16_t *base, vuint16m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint16m8_t test_th_vlxh_v_i16m8(const int16_t *base, vuint16m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m1_t test_th_vlxh_v_i32m1(const int32_t *base, vuint32m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m2_t test_th_vlxh_v_i32m2(const int32_t *base, vuint32m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint32m4_t test_th_vlxh_v_i32m4(const int32_t *base, vuint32m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint32m8_t test_th_vlxh_v_i32m8(const int32_t *base, vuint32m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m1_t test_th_vlxh_v_i64m1(const int64_t *base, vuint64m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m2_t test_th_vlxh_v_i64m2(const int64_t *base, vuint64m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m4_t test_th_vlxh_v_i64m4(const int64_t *base, vuint64m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_th_vlxh_v_i64m8(const int64_t *base, vuint64m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_th_vlxw_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m2_t test_th_vlxw_v_i8m2(const int8_t *base, vuint8m2_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m4_t test_th_vlxw_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m8_t test_th_vlxw_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint16m1_t test_th_vlxw_v_i16m1(const int16_t *base, vuint16m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint16m2_t test_th_vlxw_v_i16m2(const int16_t *base, vuint16m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint16m4_t test_th_vlxw_v_i16m4(const int16_t *base, vuint16m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint16m8_t test_th_vlxw_v_i16m8(const int16_t *base, vuint16m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint32m1_t test_th_vlxw_v_i32m1(const int32_t *base, 
vuint32m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint32m2_t test_th_vlxw_v_i32m2(const int32_t *base, vuint32m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint32m4_t test_th_vlxw_v_i32m4(const int32_t *base, vuint32m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint32m8_t test_th_vlxw_v_i32m8(const int32_t *base, vuint32m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint64m1_t test_th_vlxw_v_i64m1(const int64_t *base, vuint64m1_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint64m2_t test_th_vlxw_v_i64m2(const int64_t *base, vuint64m2_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint64m4_t test_th_vlxw_v_i64m4(const int64_t *base, vuint64m4_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint64m8_t test_th_vlxw_v_i64m8(const int64_t *base, vuint64m8_t index, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint8m1_t test_th_vlxbu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint8m2_t test_th_vlxbu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint8m4_t test_th_vlxbu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint8m8_t test_th_vlxbu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint16m1_t test_th_vlxbu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i16.nxv8i16.i64( poison, 
ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint16m2_t test_th_vlxbu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint16m4_t test_th_vlxbu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint16m8_t test_th_vlxbu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint32m1_t test_th_vlxbu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint32m2_t test_th_vlxbu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint32m4_t test_th_vlxbu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint32m8_t test_th_vlxbu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_ } // CHECK-RV64-LABEL: define 
dso_local @test_th_vlxbu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m1_t test_th_vlxbu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m2_t test_th_vlxbu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m4_t test_th_vlxbu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vuint64m8_t test_th_vlxbu_v_u64m8(const uint64_t *base, vuint64m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vuint8m1_t test_th_vlxhu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vuint8m2_t test_th_vlxhu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vuint8m4_t test_th_vlxhu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -686,7 +687,7 @@ vuint8m8_t test_th_vlxhu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vuint16m1_t test_th_vlxhu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vuint16m2_t test_th_vlxhu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vuint16m4_t test_th_vlxhu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vuint16m8_t test_th_vlxhu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vuint32m1_t test_th_vlxhu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vuint32m2_t test_th_vlxhu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 +757,7 @@ vuint32m4_t test_th_vlxhu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vuint32m8_t test_th_vlxhu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vuint64m1_t test_th_vlxhu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vuint64m2_t test_th_vlxhu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vuint64m4_t test_th_vlxhu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m8 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vuint64m8_t test_th_vlxhu_v_u64m8(const uint64_t *base, vuint64m8_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vuint8m1_t test_th_vlxwu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vuint8m2_t test_th_vlxwu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vuint8m4_t test_th_vlxwu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ vuint8m8_t test_th_vlxwu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vuint16m1_t test_th_vlxwu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.th.vlxwu.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> poison, ptr [[BASE]], <vscale x 8 x i16> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
@@ -866,7 +867,7 @@ vuint16m2_t test_th_vlxwu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_th_vlxwu_v_u16m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.th.vlxwu.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> poison, ptr [[BASE]], <vscale x 16 x i16> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
@@ -876,7 +877,7 @@ vuint16m4_t test_th_vlxwu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_th_vlxwu_v_u16m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 32 x i16> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 32 x i16> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.th.vlxwu.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> poison, ptr [[BASE]], <vscale x 32 x i16> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
@@ -886,7 +887,7 @@ vuint16m8_t test_th_vlxwu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_th_vlxwu_v_u32m1
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i32> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i32> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.th.vlxwu.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> poison, ptr [[BASE]], <vscale x 2 x i32> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
@@ -896,7 +897,7 @@ vuint32m1_t test_th_vlxwu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_th_vlxwu_v_u32m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i32> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i32> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.th.vlxwu.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> poison, ptr [[BASE]], <vscale x 4 x i32> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
@@ -906,7 +907,7 @@ vuint32m2_t test_th_vlxwu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_th_vlxwu_v_u32m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i32> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i32> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.th.vlxwu.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> poison, ptr [[BASE]], <vscale x 8 x i32> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
@@ -916,7 +917,7 @@ vuint32m4_t test_th_vlxwu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_th_vlxwu_v_u32m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i32> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i32> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.th.vlxwu.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> poison, ptr [[BASE]], <vscale x 16 x i32> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
@@ -926,7 +927,7 @@ vuint32m8_t test_th_vlxwu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_th_vlxwu_v_u64m1
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i64> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i64> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.th.vlxwu.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> poison, ptr [[BASE]], <vscale x 1 x i64> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
@@ -936,7 +937,7 @@ vuint64m1_t test_th_vlxwu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_th_vlxwu_v_u64m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i64> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i64> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.th.vlxwu.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> poison, ptr [[BASE]], <vscale x 2 x i64> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
@@ -946,7 +947,7 @@ vuint64m2_t test_th_vlxwu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_th_vlxwu_v_u64m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i64> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i64> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.th.vlxwu.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> poison, ptr [[BASE]], <vscale x 4 x i64> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
@@ -956,7 +957,7 @@ vuint64m4_t test_th_vlxwu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_th_vlxwu_v_u64m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i64> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i64> [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.th.vlxwu.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, ptr [[BASE]], <vscale x 8 x i64> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vsx.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vsx.c
index a397eb01cb6497..907b3138029ad1 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vsx.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vsx.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ void test_th_vsxb_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t
 }
 
 // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i8> [[INDEX:%.*]], <vscale x 16 x i8> [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i8> [[INDEX:%.*]], <vscale x 16 x i8> [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE]], ptr [[BASE]], <vscale x 16 x i8> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret void
@@ -26,7 +27,7 @@ void test_th_vsxb_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t
 }
 
 //
CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsxb_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsxb_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsxb_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsxb_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsxb_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsxb_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m1 -// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsxb_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsxb_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsxb_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsxb_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsxb_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsxb_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsxb_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsxb_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsxh_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsxh_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsxh_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsxh_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsxh_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsxh_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsxh_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsxh_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsxh_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsxh_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsxh_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsxh_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vsxh_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vsxh_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vsxh_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -326,7 +327,7 @@ void test_th_vsxh_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxw.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -336,7 +337,7 @@ void test_th_vsxw_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -346,7 +347,7 @@ void test_th_vsxw_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -356,7 +357,7 @@ void test_th_vsxw_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -366,7 +367,7 @@ void test_th_vsxw_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -376,7 +377,7 @@ void test_th_vsxw_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -386,7 +387,7 @@ void test_th_vsxw_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], 
i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -396,7 +397,7 @@ void test_th_vsxw_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -406,7 +407,7 @@ void test_th_vsxw_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -416,7 +417,7 @@ void test_th_vsxw_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -426,7 +427,7 @@ void test_th_vsxw_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -436,7 +437,7 @@ void test_th_vsxw_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -446,7 +447,7 @@ void test_th_vsxw_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -456,7 +457,7 @@ void 
test_th_vsxw_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -466,7 +467,7 @@ void test_th_vsxw_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -476,7 +477,7 @@ void test_th_vsxw_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -486,7 +487,7 @@ void test_th_vsxw_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, si } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -496,7 +497,7 @@ void test_th_vsxb_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -506,7 +507,7 @@ void test_th_vsxb_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -516,7 +517,7 @@ void test_th_vsxb_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size } 
// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -526,7 +527,7 @@ void test_th_vsxb_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -536,7 +537,7 @@ void test_th_vsxb_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -546,7 +547,7 @@ void test_th_vsxb_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -556,7 +557,7 @@ void test_th_vsxb_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -566,7 +567,7 @@ void test_th_vsxb_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -576,7 +577,7 @@ void test_th_vsxb_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m2 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -586,7 +587,7 @@ void test_th_vsxb_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -596,7 +597,7 @@ void test_th_vsxb_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -606,7 +607,7 @@ void test_th_vsxb_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -616,7 +617,7 @@ void test_th_vsxb_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -626,7 +627,7 @@ void test_th_vsxb_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -636,7 +637,7 @@ void test_th_vsxb_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -646,7 +647,7 @@ void test_th_vsxb_v_u64m8(uint64_t *base, vuint64m8_t index, vuint64m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -656,7 +657,7 @@ void test_th_vsxh_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -666,7 +667,7 @@ void test_th_vsxh_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -676,7 +677,7 @@ void test_th_vsxh_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -686,7 +687,7 @@ void test_th_vsxh_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -696,7 +697,7 @@ void test_th_vsxh_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -706,7 +707,7 @@ void test_th_vsxh_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -716,7 +717,7 @@ void test_th_vsxh_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -726,7 +727,7 @@ void test_th_vsxh_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -736,7 +737,7 @@ void test_th_vsxh_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -746,7 +747,7 @@ void test_th_vsxh_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -756,7 +757,7 @@ void test_th_vsxh_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -766,7 +767,7 @@ void test_th_vsxh_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -776,7 +777,7 @@ void test_th_vsxh_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -786,7 +787,7 @@ void test_th_vsxh_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -796,7 +797,7 @@ void test_th_vsxh_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -806,7 +807,7 @@ void test_th_vsxh_v_u64m8(uint64_t *base, vuint64m8_t index, vuint64m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -816,7 +817,7 @@ void test_th_vsxw_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxw.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -826,7 +827,7 @@ void test_th_vsxw_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -836,7 +837,7 @@ void test_th_vsxw_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -846,7 +847,7 @@ void test_th_vsxw_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -856,7 +857,7 @@ void test_th_vsxw_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -866,7 +867,7 @@ void test_th_vsxw_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -876,7 +877,7 @@ void test_th_vsxw_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], 
[[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -886,7 +887,7 @@ void test_th_vsxw_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -896,7 +897,7 @@ void test_th_vsxw_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -906,7 +907,7 @@ void test_th_vsxw_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -916,7 +917,7 @@ void test_th_vsxw_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -926,7 +927,7 @@ void test_th_vsxw_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -936,7 +937,7 @@ void test_th_vsxw_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -946,7 +947,7 @@ void 
test_th_vsxw_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value,
 }
 
 // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i64> [[INDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i64> [[INDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE]], ptr [[BASE]], <vscale x 4 x i64> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret void
@@ -956,7 +957,7 @@ void test_th_vsxw_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value,
 }
 
 // CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i64> [[INDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i64> [[INDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE]], ptr [[BASE]], <vscale x 8 x i64> [[INDEX]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret void
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/rvv-errors.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/rvv-errors.c
index eb8f8a15b4a818..6bbf6a18eeb1d3 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/rvv-errors.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/rvv-errors.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 %s -triple=riscv64 -target-feature +xtheadvector -fsyntax-only -verify
 
 #include <riscv_vector.h>
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/rvv-example-memcpy.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/rvv-example-memcpy.c
index 0da0fe0447788a..cb7f873aae1614 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/rvv-example-memcpy.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/rvv-example-memcpy.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -O0 -emit-llvm %s -o - | FileCheck %s
 
@@ -5,62 +6,58 @@
 typedef unsigned char uint8_t;
 
-// CHECK-LABEL: define dso_local void @memcpy_v(ptr noundef %dst, ptr noundef %src, i32 noundef signext %n) #0 {
-// CHECK-NEXT: entry:
-// CHECK-NEXT: %dst.addr = alloca ptr, align 8
-// CHECK-NEXT: %src.addr = alloca ptr, align 8
-// CHECK-NEXT: %n.addr = alloca i32, align 4
-// CHECK-NEXT: %vl = alloca i32, align 4
-// CHECK-NEXT: %vec_src = alloca <vscale x 32 x i8>, align 1
-// CHECK-NEXT: store ptr %dst, ptr %dst.addr, align 8
-// CHECK-NEXT: store ptr %src, ptr %src.addr, align 8
-// CHECK-NEXT: store i32 %n, ptr %n.addr, align 4
-// CHECK-NEXT: br label %for.cond
+// CHECK-LABEL: define dso_local void @memcpy_v
+// CHECK-SAME: (ptr noundef [[DST:%.*]], ptr noundef [[SRC:%.*]], i32 noundef signext [[N:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-NEXT: entry:
+// CHECK-NEXT: [[DST_ADDR:%.*]] = alloca ptr, align 8
+// CHECK-NEXT: [[SRC_ADDR:%.*]] = alloca ptr, align 8
+// CHECK-NEXT: [[N_ADDR:%.*]] = alloca i32, align 4
+// CHECK-NEXT: [[VL:%.*]] = alloca i32, align 4
+// CHECK-NEXT: [[VEC_SRC:%.*]] = alloca <vscale x 32 x i8>, align 1
+// CHECK-NEXT: store ptr [[DST]], ptr [[DST_ADDR]], align 8
+// CHECK-NEXT: store ptr [[SRC]], ptr [[SRC_ADDR]], align 8
+// CHECK-NEXT: store i32 [[N]], ptr [[N_ADDR]], align 4
+// CHECK-NEXT: br label [[FOR_COND:%.*]]
+// CHECK: for.cond:
+// CHECK-NEXT: [[TMP0:%.*]] = load i32, ptr [[N_ADDR]], align 4
+// CHECK-NEXT: [[CMP:%.*]] = icmp sgt i32 [[TMP0]], 0
+// CHECK-NEXT: br i1 [[CMP]], label [[FOR_BODY:%.*]], label [[FOR_END:%.*]]
+// CHECK: for.body:
+// CHECK-NEXT: [[TMP1:%.*]] = load i32, ptr [[N_ADDR]], align 4
+// CHECK-NEXT: [[CONV:%.*]] = sext i32 [[TMP1]] to i64
+// CHECK-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.th.vsetvl.i64(i64 [[CONV]], i64 0, i64 2)
+// CHECK-NEXT: [[CONV1:%.*]] = trunc i64 [[TMP2]] to i32
+// CHECK-NEXT: store i32 [[CONV1]], ptr [[VL]], align 4
+// CHECK-NEXT: [[TMP3:%.*]] = load ptr, ptr [[SRC_ADDR]], align 8
+// CHECK-NEXT: [[TMP4:%.*]] = load i32, ptr [[VL]], align 4
+// CHECK-NEXT: [[CONV2:%.*]] = sext i32 [[TMP4]] to i64
+// CHECK-NEXT: [[TMP5:%.*]] = call <vscale x 32 x i8> @llvm.riscv.th.vle.nxv32i8.i64(<vscale x 32 x i8> poison, ptr [[TMP3]], i64 [[CONV2]])
+// CHECK-NEXT: store <vscale x 32 x i8> [[TMP5]], ptr [[VEC_SRC]], align 1
+// CHECK-NEXT: [[TMP6:%.*]] = load ptr, ptr [[DST_ADDR]], align 8
+// CHECK-NEXT: [[TMP7:%.*]] = load <vscale x 32 x i8>, ptr [[VEC_SRC]], align 1
+// CHECK-NEXT: [[TMP8:%.*]] = load i32, ptr [[VL]], align 4
+// CHECK-NEXT: [[CONV3:%.*]] = sext i32 [[TMP8]] to i64
+// CHECK-NEXT: call void @llvm.riscv.th.vse.nxv32i8.i64(<vscale x 32 x i8> [[TMP7]], ptr [[TMP6]], i64 [[CONV3]])
+// CHECK-NEXT: br label [[FOR_INC:%.*]]
+// CHECK: for.inc:
+// CHECK-NEXT: [[TMP9:%.*]] = load i32, ptr [[VL]], align 4
+// CHECK-NEXT: [[TMP10:%.*]] = load i32, ptr [[N_ADDR]], align 4
+// CHECK-NEXT: [[SUB:%.*]] = sub nsw i32 [[TMP10]], [[TMP9]]
+// CHECK-NEXT: store i32 [[SUB]], ptr [[N_ADDR]], align 4
+// CHECK-NEXT: [[TMP11:%.*]] = load i32, ptr [[VL]], align 4
+// CHECK-NEXT: [[TMP12:%.*]] = load ptr, ptr [[SRC_ADDR]], align 8
+// CHECK-NEXT: [[IDX_EXT:%.*]] = sext i32 [[TMP11]] to i64
+// CHECK-NEXT: [[ADD_PTR:%.*]] = getelementptr inbounds i8, ptr [[TMP12]], i64 [[IDX_EXT]]
+// CHECK-NEXT: store ptr [[ADD_PTR]], ptr [[SRC_ADDR]], align 8
+// CHECK-NEXT: [[TMP13:%.*]] = load i32, ptr [[VL]], align 4
+// CHECK-NEXT: [[TMP14:%.*]] = load ptr, ptr [[DST_ADDR]], align 8
+// CHECK-NEXT: [[IDX_EXT4:%.*]] = sext i32 [[TMP13]] to i64
+// CHECK-NEXT: [[ADD_PTR5:%.*]] = getelementptr inbounds i8, ptr [[TMP14]], i64 [[IDX_EXT4]]
+// CHECK-NEXT: store ptr [[ADD_PTR5]], ptr [[DST_ADDR]], align 8
+// CHECK-NEXT: br label [[FOR_COND]], !llvm.loop [[LOOP4:![0-9]+]]
+// CHECK: for.end:
+// CHECK-NEXT: ret void
 //
-// CHECK-LABEL: for.cond: ; preds = %for.inc, %entry
-// CHECK-NEXT: %0 = load i32, ptr %n.addr, align 4
-// CHECK-NEXT: %cmp = icmp sgt i32 %0, 0
-// CHECK-NEXT: br i1 %cmp, label %for.body, label %for.end
-//
-// CHECK-LABEL: for.body: ; preds = %for.cond
-// CHECK-NEXT: %1 = load i32, ptr %n.addr, align 4
-// CHECK-NEXT: %conv = sext i32 %1 to i64
-// CHECK-NEXT: %2 = call i64 @llvm.riscv.th.vsetvl.i64(i64 %conv, i64 0, i64 2)
-// CHECK-NEXT: %conv1 = trunc i64 %2 to i32
-// CHECK-NEXT: store i32 %conv1, ptr %vl, align 4
-// CHECK-NEXT: %3 = load ptr, ptr %src.addr, align 8
-// CHECK-NEXT: %4 = load i32, ptr %vl, align 4
-// CHECK-NEXT: %conv2 = sext i32 %4 to i64
-// CHECK-NEXT: %5 = call <vscale x 32 x i8> @llvm.riscv.th.vle.nxv32i8.i64(<vscale x 32 x i8> poison, ptr %3, i64 %conv2)
-// CHECK-NEXT: store <vscale x 32 x i8> %5, ptr %vec_src, align 1
-// CHECK-NEXT: %6 = load ptr, ptr %dst.addr, align 8
-// CHECK-NEXT: %7 = load <vscale x 32 x i8>, ptr %vec_src, align 1
-// CHECK-NEXT: %8 = load i32, ptr %vl, align 4
-// CHECK-NEXT: %conv3 = sext i32 %8 to i64
-// CHECK-NEXT: call void @llvm.riscv.th.vse.nxv32i8.i64(<vscale x 32 x i8> %7, ptr %6, i64 %conv3)
-// CHECK-NEXT: br label %for.inc
-//
-// CHECK-LABEL: for.inc: ; preds = %for.body
-// CHECK-NEXT: %9 = load i32, ptr %vl, align 4
-// CHECK-NEXT: %10 = load i32, ptr %n.addr, align 4
-// CHECK-NEXT: %sub = sub nsw i32 %10, %9
-// CHECK-NEXT: store i32 %sub, ptr %n.addr, align 4
-// CHECK-NEXT: %11 = load i32, ptr %vl, align 4
-// CHECK-NEXT: %12 = load ptr, ptr %src.addr, align 8
-// CHECK-NEXT: %idx.ext = sext i32 %11 to i64
-// CHECK-NEXT: %add.ptr = getelementptr inbounds i8, ptr %12, i64 %idx.ext
-// CHECK-NEXT: store ptr %add.ptr, ptr %src.addr, align 8
-// CHECK-NEXT: %13 = load i32, ptr %vl, align 4
-// CHECK-NEXT: %14 = load ptr, ptr %dst.addr, align 8
-// CHECK-NEXT: %idx.ext4 = sext i32 %13 to i64
-// CHECK-NEXT: %add.ptr5 = getelementptr inbounds i8, ptr %14, i64 %idx.ext4
-// CHECK-NEXT: store ptr %add.ptr5, ptr %dst.addr, align 8
-// CHECK-NEXT: br label %for.cond, !llvm.loop !4
-//
-// CHECK-LABEL: for.end: ; preds = %for.cond
-// CHECK-NEXT: ret void
-// CHECK-NEXT: }
-
 void memcpy_v(uint8_t *dst, const uint8_t *src, int n) {
   for (int vl; n > 0; n -= vl, src += vl, dst += vl) {
     vl = __riscv_vsetvl_e8m4(n);
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsb.c
index 465c29a7fb5039..c208134d060239 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsb.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsb.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vint8m1_t test_th_vlsb_v_i8m1(const int8_t *base, size_t stride, size_t vl) {
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_th_vlsb_v_i8m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.th.vlsb.nxv16i8.i64(<vscale x 16 x i8> poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
@@ -26,7 +27,7 @@ vint8m2_t test_th_vlsb_v_i8m2(const int8_t *base, size_t stride, size_t vl) {
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_th_vlsb_v_i8m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.th.vlsb.nxv32i8.i64(<vscale x 32 x i8> poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
@@ -36,7 +37,7 @@ vint8m4_t test_th_vlsb_v_i8m4(const int8_t *base, size_t stride, size_t vl) {
 }
 
 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_th_vlsb_v_i8m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.th.vlsb.nxv64i8.i64(<vscale x 64 x i8> poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
@@ -46,7 +47,7 @@ vint8m8_t test_th_vlsb_v_i8m8(const int8_t *base, size_t stride,
size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlsb_v_i16m1(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlsb_v_i16m2(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlsb_v_i16m4(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlsb_v_i16m8(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlsb_v_i32m1(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlsb_v_i32m2(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m4 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlsb_v_i32m4(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlsb_v_i32m8(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlsb_v_i64m1(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlsb_v_i64m2(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlsb_v_i64m4(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsbu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsbu.c index 918912ce4878d5..070737973367b0 100644 --- 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsbu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsbu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlsbu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlsbu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlsbu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlsbu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlsbu_v_u16m1(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlsbu_v_u16m2(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlsbu_v_u16m4(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_th_vlsbu_v_u16m8(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlsbu_v_u32m1(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlsbu_v_u32m2(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlsbu_v_u32m4(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlsbu_v_u32m8(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlsbu_v_u64m1(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlsbu_v_u64m2(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlsbu_v_u64m4(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse16.c index 6a9788d845c0e6..fdf2d9b33ffe0f 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_th_vlse16_v_f16m1(const _Float16 *base, ptrdiff_t stride, size } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m2_t test_th_vlse16_v_f16m2(const _Float16 *base, ptrdiff_t stride, size } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m4_t test_th_vlse16_v_f16m4(const _Float16 *base, ptrdiff_t stride, size } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m8_t test_th_vlse16_v_f16m8(const _Float16 *base, ptrdiff_t stride, size } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlse16_v_i16m1(const int16_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlse16_v_i16m2(const int16_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlse16_v_i16m4(const int16_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlse16_v_i16m8(const int16_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_th_vlse16_v_u16m1(const uint16_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m2_t test_th_vlse16_v_u16m2(const uint16_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m4_t test_th_vlse16_v_u16m4(const uint16_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse32.c index 0200b2071f4df3..7bf2805cab435c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1_t test_th_vlse32_v_f32m1(const float *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m2_t test_th_vlse32_v_f32m2(const float *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat32m4_t test_th_vlse32_v_f32m4(const float *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m8_t test_th_vlse32_v_f32m8(const float *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint32m1_t test_th_vlse32_v_i32m1(const int32_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint32m2_t test_th_vlse32_v_i32m2(const int32_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint32m4_t test_th_vlse32_v_i32m4(const int32_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint32m8_t test_th_vlse32_v_i32m8(const int32_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlse32_v_u32m1(const uint32_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlse32_v_u32m2(const uint32_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlse32_v_u32m4(const uint32_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse64.c index ecaf41f95c777d..38cc2c6333452e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat64m1_t test_th_vlse64_v_f64m1(const double *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat64m2_t test_th_vlse64_v_f64m2(const double *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat64m4_t test_th_vlse64_v_f64m4(const double *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat64m8_t test_th_vlse64_v_f64m8(const double *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint64m1_t test_th_vlse64_v_i64m1(const int64_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint64m2_t test_th_vlse64_v_i64m2(const int64_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint64m4_t test_th_vlse64_v_i64m4(const int64_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint64m8_t test_th_vlse64_v_i64m8(const int64_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv1i64.i64( poison, ptr 
[[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint64m1_t test_th_vlse64_v_u64m1(const uint64_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint64m2_t test_th_vlse64_v_u64m2(const uint64_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint64m4_t test_th_vlse64_v_u64m4(const uint64_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse8.c index bc302566ccaa03..63b7f583969443 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlse8_v_i8m1(const int8_t *base, ptrdiff_t stride, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlse8_v_i8m2(const int8_t *base, ptrdiff_t stride, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i8.i64( poison, ptr 
[[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlse8_v_i8m4(const int8_t *base, ptrdiff_t stride, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlse8_v_i8m8(const int8_t *base, ptrdiff_t stride, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m1_t test_th_vlse8_v_u8m1(const uint8_t *base, ptrdiff_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m2_t test_th_vlse8_v_u8m2(const uint8_t *base, ptrdiff_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m4_t test_th_vlse8_v_u8m4(const uint8_t *base, ptrdiff_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsh.c index dd62c13ef10e01..3366fe1856d3b0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsh.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsh.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm 
%s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlsh_v_i8m1(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlsh_v_i8m2(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlsh_v_i8m4(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlsh_v_i8m8(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlsh_v_i16m1(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlsh_v_i16m2(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlsh_v_i16m4(const 
int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlsh_v_i16m8(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlsh_v_i32m1(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlsh_v_i32m2(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlsh_v_i32m4(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlsh_v_i32m8(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlsh_v_i64m1(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_th_vlsh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlsh_v_i64m2(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlsh_v_i64m4(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlshu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlshu.c index 5ead421d53157f..9f4ff316690da1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlshu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlshu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlshu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlshu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlshu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_th_vlshu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlshu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlshu_v_u16m1(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlshu_v_u16m2(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlshu_v_u16m4(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_th_vlshu_v_u16m8(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlshu_v_u32m1(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlshu_v_u32m2(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlshu_v_u32m4(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlshu_v_u32m8(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlshu_v_u64m1(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlshu_v_u64m2(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlshu_v_u64m4(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsw.c index e1848d522e47a8..60a53843c07adf 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsw.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsw.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlsw_v_i8m1(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlsw_v_i8m2(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlsw_v_i8m4(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlsw_v_i8m8(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlsw_v_i16m1(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlsw_v_i16m2(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlsw_v_i16m4(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlsw_v_i16m8(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlsw_v_i32m1(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlsw_v_i32m2(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlsw_v_i32m4(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlsw_v_i32m8(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlsw_v_i64m1(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlsw_v_i64m2(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlsw_v_i64m4(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlswu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlswu.c index 3a3ca7bb270a7d..11f1e20dc5c883 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlswu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlswu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlswu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlswu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlswu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlswu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlswu_v_u16m1(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlswu_v_u16m2(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlswu_v_u16m4(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlswu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_th_vlswu_v_u16m8(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlswu_v_u32m1(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlswu_v_u32m2(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlswu_v_u32m4(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlswu_v_u32m8(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlswu_v_u64m1(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv2i64.i64( poison, ptr [[BASE]], i64 
[[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlswu_v_u64m2(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlswu_v_u64m4(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssb.c index 8fb77fd9e55082..ada667f0b070d3 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssb.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssb.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vssb_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vssb_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vssb_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vssb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vssb_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vssb_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vssb_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vssb_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vssb_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vssb_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vssb_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vssb_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vssb_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vssb_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vssb_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vssb_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vssb_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vssb_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vssb_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vssb_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vssb_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vssb_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vssb_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vssb_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vssb_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vssb_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vssb_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vssb_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vssb_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vssb_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vssb_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vssb_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse16.c index 9fb3ef4c470e4a..3b5fbafe6545c4 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsse16_v_f16m1(_Float16 *base, ptrdiff_t stride, vfloat16m1_t value } // 
CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsse16_v_f16m2(_Float16 *base, ptrdiff_t stride, vfloat16m2_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsse16_v_f16m4(_Float16 *base, ptrdiff_t stride, vfloat16m4_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsse16_v_f16m8(_Float16 *base, ptrdiff_t stride, vfloat16m8_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsse16_v_i16m1(int16_t *base, ptrdiff_t stride, vint16m1_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsse16_v_i16m2(int16_t *base, ptrdiff_t stride, vint16m2_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void 
test_th_vsse16_v_i16m4(int16_t *base, ptrdiff_t stride, vint16m4_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsse16_v_i16m8(int16_t *base, ptrdiff_t stride, vint16m8_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsse16_v_u16m1(uint16_t *base, ptrdiff_t stride, vuint16m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsse16_v_u16m2(uint16_t *base, ptrdiff_t stride, vuint16m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsse16_v_u16m4(uint16_t *base, ptrdiff_t stride, vuint16m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse32.c index f9459863e8c1ed..bcf328527a7bd1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature 
+xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ void test_th_vsse32_v_f32m1(float *base, ptrdiff_t stride, vfloat32m1_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -26,7 +27,7 @@ void test_th_vsse32_v_f32m2(float *base, ptrdiff_t stride, vfloat32m2_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -36,7 +37,7 @@ void test_th_vsse32_v_f32m4(float *base, ptrdiff_t stride, vfloat32m4_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -46,7 +47,7 @@ void test_th_vsse32_v_f32m8(float *base, ptrdiff_t stride, vfloat32m8_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m1
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -56,7 +57,7 @@ void test_th_vsse32_v_i32m1(int32_t *base, ptrdiff_t stride, vint32m1_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -66,7 +67,7 @@ void test_th_vsse32_v_i32m2(int32_t *base, ptrdiff_t stride, vint32m2_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -76,7 +77,7 @@ void test_th_vsse32_v_i32m4(int32_t *base, ptrdiff_t stride, vint32m4_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -86,7 +87,7 @@ void test_th_vsse32_v_i32m8(int32_t *base, ptrdiff_t stride, vint32m8_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m1
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -96,7 +97,7 @@ void test_th_vsse32_v_u32m1(uint32_t *base, ptrdiff_t stride, vuint32m1_t value,
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -106,7 +107,7 @@ void test_th_vsse32_v_u32m2(uint32_t *base, ptrdiff_t stride, vuint32m2_t value,
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -116,7 +117,7 @@ void test_th_vsse32_v_u32m4(uint32_t *base, ptrdiff_t stride, vuint32m4_t value,
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse64.c
index a8bd79c3273276..70c70c83cc053b 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse64.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse64.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ void test_th_vsse64_v_f64m1(double *base, ptrdiff_t stride, vfloat64m1_t value,
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -26,7 +27,7 @@ void test_th_vsse64_v_f64m2(double *base, ptrdiff_t stride, vfloat64m2_t value,
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -36,7 +37,7 @@ void test_th_vsse64_v_f64m4(double *base, ptrdiff_t stride, vfloat64m4_t value,
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -46,7 +47,7 @@ void test_th_vsse64_v_f64m8(double *base, ptrdiff_t stride, vfloat64m8_t value,
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m1
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -56,7 +57,7 @@ void test_th_vsse64_v_i64m1(int64_t *base, ptrdiff_t stride, vint64m1_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -66,7 +67,7 @@ void test_th_vsse64_v_i64m2(int64_t *base, ptrdiff_t stride, vint64m2_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -76,7 +77,7 @@ void test_th_vsse64_v_i64m4(int64_t *base, ptrdiff_t stride, vint64m4_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -86,7 +87,7 @@ void test_th_vsse64_v_i64m8(int64_t *base, ptrdiff_t stride, vint64m8_t value, s
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m1
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -96,7 +97,7 @@ void test_th_vsse64_v_u64m1(uint64_t *base, ptrdiff_t stride, vuint64m1_t value,
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -106,7 +107,7 @@ void test_th_vsse64_v_u64m2(uint64_t *base, ptrdiff_t stride, vuint64m2_t value,
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -116,7 +117,7 @@ void test_th_vsse64_v_u64m4(uint64_t *base, ptrdiff_t stride, vuint64m4_t value,
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse8.c
index 5c15c20c0007e8..9ef49aa9b7d66f 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse8.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse8.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ void test_th_vsse8_v_i8m1(int8_t *base, ptrdiff_t stride, vint8m1_t value, size_
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -26,7 +27,7 @@ void test_th_vsse8_v_i8m2(int8_t *base, ptrdiff_t stride, vint8m2_t value, size_
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -36,7 +37,7 @@ void test_th_vsse8_v_i8m4(int8_t *base, ptrdiff_t stride, vint8m4_t value, size_
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -46,7 +47,7 @@ void test_th_vsse8_v_i8m8(int8_t *base, ptrdiff_t stride, vint8m8_t value, size_
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m1
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -56,7 +57,7 @@ void test_th_vsse8_v_u8m1(uint8_t *base, ptrdiff_t stride, vuint8m1_t value, siz
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -66,7 +67,7 @@ void test_th_vsse8_v_u8m2(uint8_t *base, ptrdiff_t stride, vuint8m2_t value, siz
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -76,7 +77,7 @@ void test_th_vsse8_v_u8m4(uint8_t *base, ptrdiff_t stride, vuint8m4_t value, siz
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssh.c
index f077c768e61393..f60998b0a6ba2e 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssh.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssh.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ void test_th_vssh_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -26,7 +27,7 @@ void test_th_vssh_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -36,7 +37,7 @@ void test_th_vssh_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT:
call void @llvm.riscv.th.vssh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vssh_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vssh_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vssh_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vssh_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vssh_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vssh_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vssh_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vssh_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vssh_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vssh_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vssh_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vssh_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vssh_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vssh_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vssh_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vssh_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vssh_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vssh_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vssh_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vssh_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vssh_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vssh_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vssh_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vssh_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vssh_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vssh_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vssh_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vssh_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssw.c index a0e883b97e3d50..0f8c16124e647e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssw.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssw.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vssw_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl } // 
CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vssw_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vssw_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vssw_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vssw_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vssw_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vssw_v_i16m4(int16_t 
*base, size_t stride, vint16m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vssw_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vssw_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vssw_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vssw_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vssw_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: 
ret void @@ -136,7 +137,7 @@ void test_th_vssw_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vssw_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vssw_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vssw_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vssw_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vssw_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i8.i64( [[VALUE]], ptr 
[[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vssw_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vssw_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vssw_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vssw_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vssw_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vssw_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vssw_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vssw_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vssw_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vssw_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vssw_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vssw_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vssw_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsb.c index 0ab9e8974e0ed6..95fdf5259cecd8 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsb.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsb.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlsb_v_i8m1(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlsb_v_i8m2(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlsb_v_i8m4(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlsb_v_i8m8(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlsb_v_i16m1(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlsb_v_i16m2(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlsb_v_i16m4(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlsb_v_i16m8(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlsb_v_i32m1(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlsb_v_i32m2(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlsb_v_i32m4(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlsb_v_i32m8(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlsb_v_i64m1(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlsb_v_i64m2(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlsb_v_i64m4(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsbu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsbu.c index 0d2e2630677226..89493a8ef360db 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsbu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsbu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: 
%clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlsbu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlsbu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlsbu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlsbu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlsbu_v_u16m1(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlsbu_v_u16m2(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i16.i64( poison, ptr 
[[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlsbu_v_u16m4(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_th_vlsbu_v_u16m8(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlsbu_v_u32m1(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlsbu_v_u32m2(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlsbu_v_u32m4(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlsbu_v_u32m8(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] 
@@ -136,7 +137,7 @@ vuint64m1_t test_th_vlsbu_v_u64m1(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlsbu_v_u64m2(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlsbu_v_u64m4(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse16.c index a5c80b8886c0cb..d22708b230d8d3 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_th_vlse16_v_f16m1(const _Float16 *base, ptrdiff_t stride, size } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m2_t test_th_vlse16_v_f16m2(const _Float16 *base, ptrdiff_t stride, size } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m4_t test_th_vlse16_v_f16m4(const _Float16 *base, ptrdiff_t stride, size } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m8_t test_th_vlse16_v_f16m8(const _Float16 *base, ptrdiff_t stride, size } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlse16_v_i16m1(const int16_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlse16_v_i16m2(const int16_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlse16_v_i16m4(const int16_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlse16_v_i16m8(const int16_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t 
test_th_vlse16_v_u16m1(const uint16_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m2_t test_th_vlse16_v_u16m2(const uint16_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m4_t test_th_vlse16_v_u16m4(const uint16_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse32.c index 0c69d4be25b316..03e55a561adec5 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1_t test_th_vlse32_v_f32m1(const float *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m2_t test_th_vlse32_v_f32m2(const float *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 
@@ vfloat32m4_t test_th_vlse32_v_f32m4(const float *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m8_t test_th_vlse32_v_f32m8(const float *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint32m1_t test_th_vlse32_v_i32m1(const int32_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint32m2_t test_th_vlse32_v_i32m2(const int32_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint32m4_t test_th_vlse32_v_i32m4(const int32_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint32m8_t test_th_vlse32_v_i32m8(const int32_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlse32_v_u32m1(const uint32_t *base, ptrdiff_t stride, size_ 
} // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlse32_v_u32m2(const uint32_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlse32_v_u32m4(const uint32_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse64.c index 47a29309acd880..669988c4eca8cd 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat64m1_t test_th_vlse64_v_f64m1(const double *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat64m2_t test_th_vlse64_v_f64m2(const double *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat64m4_t test_th_vlse64_v_f64m4(const double *base, ptrdiff_t 
stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat64m8_t test_th_vlse64_v_f64m8(const double *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint64m1_t test_th_vlse64_v_i64m1(const int64_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint64m2_t test_th_vlse64_v_i64m2(const int64_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint64m4_t test_th_vlse64_v_i64m4(const int64_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint64m8_t test_th_vlse64_v_i64m8(const int64_t *base, ptrdiff_t stride, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint64m1_t test_th_vlse64_v_u64m1(const uint64_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m2 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint64m2_t test_th_vlse64_v_u64m2(const uint64_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint64m4_t test_th_vlse64_v_u64m4(const uint64_t *base, ptrdiff_t stride, size_ } // CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse8.c index 711d12df685ed1..2073d3691aa480 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlse8_v_i8m1(const int8_t *base, ptrdiff_t stride, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlse8_v_i8m2(const int8_t *base, ptrdiff_t stride, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlse8_v_i8m4(const int8_t *base, ptrdiff_t stride, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m8 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlse8_v_i8m8(const int8_t *base, ptrdiff_t stride, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m1_t test_th_vlse8_v_u8m1(const uint8_t *base, ptrdiff_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m2_t test_th_vlse8_v_u8m2(const uint8_t *base, ptrdiff_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m4_t test_th_vlse8_v_u8m4(const uint8_t *base, ptrdiff_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsh.c index f8aba0aa063f12..d451517b51c464 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsh.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsh.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlsh_v_i8m1(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m2 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlsh_v_i8m2(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlsh_v_i8m4(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlsh_v_i8m8(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlsh_v_i16m1(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlsh_v_i16m2(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlsh_v_i16m4(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlsh_v_i16m8(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlsh_v_i32m1(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlsh_v_i32m2(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlsh_v_i32m4(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlsh_v_i32m8(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlsh_v_i64m1(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlsh_v_i64m2(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlsh_v_i64m4(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlshu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlshu.c index 5cdeff3d2b16d1..72dd55c5b26355 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlshu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlshu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlshu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlshu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlshu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlshu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlshu_v_u16m1(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlshu_v_u16m2(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlshu_v_u16m4(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_th_vlshu_v_u16m8(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlshu_v_u32m1(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlshu_v_u32m2(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlshu_v_u32m4(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlshu_v_u32m8(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlshu_v_u64m1(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlshu_v_u64m2(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlshu_v_u64m4(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlshu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsw.c index 0f0f854d4b75cc..0ff172f7d06a7b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsw.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsw.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlsw_v_i8m1(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlsw_v_i8m2(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlsw_v_i8m4(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlsw_v_i8m8(const int8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlsw_v_i16m1(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlsw.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlsw_v_i16m2(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlsw_v_i16m4(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlsw_v_i16m8(const int16_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlsw_v_i32m1(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlsw_v_i32m2(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlsw_v_i32m4(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlsw_v_i32m8(const int32_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlsw_v_i64m1(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlsw_v_i64m2(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlsw_v_i64m4(const int64_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlswu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlswu.c index 61e52fb2876300..1f73ca30d32581 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlswu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlswu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlswu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlswu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlswu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlswu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlswu_v_u16m1(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlswu_v_u16m2(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlswu_v_u16m4(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t 
test_th_vlswu_v_u16m8(const uint16_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlswu_v_u32m1(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlswu_v_u32m2(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlswu_v_u32m4(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlswu_v_u32m8(const uint32_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlswu_v_u64m1(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlswu_v_u64m2(const uint64_t *base, size_t stride, size_t vl 
} // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlswu_v_u64m4(const uint64_t *base, size_t stride, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssb.c index 29f8d9a725c4dd..d6d1f716a38390 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssb.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssb.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vssb_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vssb_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vssb_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void 
test_th_vssb_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vssb_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vssb_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vssb_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vssb_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vssb_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vssb_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vssb_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vssb_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vssb_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vssb_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vssb_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vssb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vssb_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vssb_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vssb_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vssb_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vssb_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vssb_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vssb_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vssb_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vssb_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vssb_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vssb_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vssb_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vssb_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vssb_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vssb_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vssb_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse16.c index a778e8c6adddf7..1acec487abcdd1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsse16_v_f16m1(_Float16 *base, ptrdiff_t stride, vfloat16m1_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsse16_v_f16m2(_Float16 *base, ptrdiff_t stride, vfloat16m2_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsse16_v_f16m4(_Float16 *base, ptrdiff_t stride, vfloat16m4_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsse16_v_f16m8(_Float16 *base, ptrdiff_t stride, vfloat16m8_t value } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsse16_v_i16m1(int16_t *base, ptrdiff_t stride, vint16m1_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsse16_v_i16m2(int16_t *base, ptrdiff_t stride, vint16m2_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsse16_v_i16m4(int16_t *base, ptrdiff_t stride, vint16m4_t value, s } // CHECK-RV64-LABEL: define dso_local void 
@test_th_vsse16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsse16_v_i16m8(int16_t *base, ptrdiff_t stride, vint16m8_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsse16_v_u16m1(uint16_t *base, ptrdiff_t stride, vuint16m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsse16_v_u16m2(uint16_t *base, ptrdiff_t stride, vuint16m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsse16_v_u16m4(uint16_t *base, ptrdiff_t stride, vuint16m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse32.c index 949cfd8ede2060..4889de1ed431bf 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ 
-16,7 +17,7 @@ void test_th_vsse32_v_f32m1(float *base, ptrdiff_t stride, vfloat32m1_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsse32_v_f32m2(float *base, ptrdiff_t stride, vfloat32m2_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsse32_v_f32m4(float *base, ptrdiff_t stride, vfloat32m4_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsse32_v_f32m8(float *base, ptrdiff_t stride, vfloat32m8_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsse32_v_i32m1(int32_t *base, ptrdiff_t stride, vint32m1_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsse32_v_i32m2(int32_t *base, ptrdiff_t stride, vint32m2_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i32.i64( [[VALUE]], ptr 
[[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsse32_v_i32m4(int32_t *base, ptrdiff_t stride, vint32m4_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsse32_v_i32m8(int32_t *base, ptrdiff_t stride, vint32m8_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsse32_v_u32m1(uint32_t *base, ptrdiff_t stride, vuint32m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsse32_v_u32m2(uint32_t *base, ptrdiff_t stride, vuint32m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsse32_v_u32m4(uint32_t *base, ptrdiff_t stride, vuint32m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse64.c index f76b6951952574..1128b89ff5a9a7 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by 
utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsse64_v_f64m1(double *base, ptrdiff_t stride, vfloat64m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsse64_v_f64m2(double *base, ptrdiff_t stride, vfloat64m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsse64_v_f64m4(double *base, ptrdiff_t stride, vfloat64m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsse64_v_f64m8(double *base, ptrdiff_t stride, vfloat64m8_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsse64_v_i64m1(int64_t *base, ptrdiff_t stride, vint64m1_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsse64_v_i64m2(int64_t *base, ptrdiff_t stride, vint64m2_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsse64_v_i64m4(int64_t *base, ptrdiff_t stride, vint64m4_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsse64_v_i64m8(int64_t *base, ptrdiff_t stride, vint64m8_t value, s } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsse64_v_u64m1(uint64_t *base, ptrdiff_t stride, vuint64m1_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsse64_v_u64m2(uint64_t *base, ptrdiff_t stride, vuint64m2_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsse64_v_u64m4(uint64_t *base, ptrdiff_t stride, vuint64m4_t value, } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse8.c index 106770712fc5a4..a36af3f7e71f7c 100644 --- 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsse8_v_i8m1(int8_t *base, ptrdiff_t stride, vint8m1_t value, size_ } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsse8_v_i8m2(int8_t *base, ptrdiff_t stride, vint8m2_t value, size_ } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsse8_v_i8m4(int8_t *base, ptrdiff_t stride, vint8m4_t value, size_ } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsse8_v_i8m8(int8_t *base, ptrdiff_t stride, vint8m8_t value, size_ } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsse8_v_u8m1(uint8_t *base, ptrdiff_t stride, vuint8m1_t value, siz } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsse8_v_u8m2(uint8_t *base, ptrdiff_t stride, vuint8m2_t value, siz } // 
CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsse8_v_u8m4(uint8_t *base, ptrdiff_t stride, vuint8m4_t value, siz } // CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssh.c index c6948023c4c4be..6af72eceacc325 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssh.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssh.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vssh_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vssh_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vssh_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
void @@ -46,7 +47,7 @@ void test_th_vssh_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vssh_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vssh_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vssh_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vssh_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vssh_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i32.i64( [[VALUE]], ptr 
[[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vssh_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vssh_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vssh_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vssh_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vssh_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vssh_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vssh_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vssh_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vssh_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vssh_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vssh_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vssh_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vssh_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vssh_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vssh_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vssh_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vssh_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vssh_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -286,7 +287,7 @@ void test_th_vssh_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m1
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -296,7 +297,7 @@ void test_th_vssh_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -306,7 +307,7 @@ void test_th_vssh_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -316,7 +317,7 @@ void test_th_vssh_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssw.c
index 46d0c0bc20ae13..f65f3c672b9502 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssw.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssw.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ void test_th_vssw_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]],
i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vssw_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vssw_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vssw_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vssw_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vssw_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vssw_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void 
@test_th_vssw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vssw_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vssw_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vssw_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vssw_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vssw_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vssw_v_i64m1(int64_t *base, size_t stride, 
vint64m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vssw_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vssw_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vssw_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vssw_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vssw_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 
@@ void test_th_vssw_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vssw_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vssw_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vssw_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vssw_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vssw_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 
[[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vssw_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vssw_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vssw_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vssw_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vssw_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vssw_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size } // CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call 
void @llvm.riscv.th.vssw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
@@ -316,7 +317,7 @@ void test_th_vssw_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size
}
// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle16ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle16ff.c
index 4a43624ef88982..c9606e1a0f7322 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle16ff.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle16ff.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -23,7 +24,7 @@ vint16m1_t test_th_vle16ff_v_i16m1(const int16_t *base, size_t *new_vl, size_t v
}
// CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_i16m2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0
@@ -36,7 +37,7 @@ vint16m2_t test_th_vle16ff_v_i16m2(const int16_t *base, size_t *new_vl, size_t v
}
// CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_i16m4
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0
@@ -49,7 +50,7 @@ vint16m4_t test_th_vle16ff_v_i16m4(const int16_t *base, size_t *new_vl, size_t v
}
// CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_i16m8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0
@@ -62,7 +63,7 @@ vint16m8_t test_th_vle16ff_v_i16m8(const int16_t *base, size_t *new_vl, size_t v
}
// CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_u16m1
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME:
(ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -75,7 +76,7 @@ vuint16m1_t test_th_vle16ff_v_u16m1(const uint16_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -88,7 +89,7 @@ vuint16m2_t test_th_vle16ff_v_u16m2(const uint16_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -101,7 +102,7 @@ vuint16m4_t test_th_vle16ff_v_u16m4(const uint16_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -114,7 +115,7 @@ vuint16m8_t test_th_vle16ff_v_u16m8(const uint16_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_f16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -127,7 +128,7 @@ vfloat16m1_t test_th_vle16ff_v_f16m1(const float16_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -140,7 +141,7 @@ vfloat16m2_t test_th_vle16ff_v_f16m2(const float16_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_f16m4 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -153,7 +154,7 @@ vfloat16m4_t test_th_vle16ff_v_f16m4(const float16_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv32f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle32ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle32ff.c index d8035e11e6db56..afc4a62f1dddfd 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle32ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle32ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -23,7 +24,7 @@ vint32m1_t test_th_vle32ff_v_i32m1(const int32_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -36,7 +37,7 @@ vint32m2_t test_th_vle32ff_v_i32m2(const int32_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -49,7 +50,7 @@ vint32m4_t test_th_vle32ff_v_i32m4(const int32_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = 
extractvalue { , i64 } [[TMP0]], 0 @@ -62,7 +63,7 @@ vint32m8_t test_th_vle32ff_v_i32m8(const int32_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -75,7 +76,7 @@ vuint32m1_t test_th_vle32ff_v_u32m1(const uint32_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -88,7 +89,7 @@ vuint32m2_t test_th_vle32ff_v_u32m2(const uint32_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -101,7 +102,7 @@ vuint32m4_t test_th_vle32ff_v_u32m4(const uint32_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -115,7 +116,7 @@ vuint32m8_t test_th_vle32ff_v_u32m8(const uint32_t *base, size_t *new_vl, size_t // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_f32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv2f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -128,7 +129,7 @@ vfloat32m1_t test_th_vle32ff_v_f32m1(const float32_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -141,7 +142,7 @@ vfloat32m2_t test_th_vle32ff_v_f32m2(const float32_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -154,7 +155,7 @@ vfloat32m4_t test_th_vle32ff_v_f32m4(const float32_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle64ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle64ff.c index 3b13e4f3499c47..2e5e34abd7bc86 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle64ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle64ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -23,7 +24,7 @@ vint64m1_t test_th_vle64ff_v_i64m1(const int64_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -36,7 +37,7 @@ vint64m2_t test_th_vle64ff_v_i64m2(const int64_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -49,7 +50,7 @@ vint64m4_t test_th_vle64ff_v_i64m4(const int64_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -62,7 +63,7 @@ vint64m8_t test_th_vle64ff_v_i64m8(const int64_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -75,7 +76,7 @@ vuint64m1_t test_th_vle64ff_v_u64m1(const uint64_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -88,7 +89,7 @@ vuint64m2_t test_th_vle64ff_v_u64m2(const uint64_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -101,7 +102,7 @@ vuint64m4_t test_th_vle64ff_v_u64m4(const uint64_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -115,7 +116,7 @@ vuint64m8_t test_th_vle64ff_v_u64m8(const uint64_t *base, size_t *new_vl, size_t // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_f64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv1f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -128,7 +129,7 @@ vfloat64m1_t test_th_vle64ff_v_f64m1(const float64_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: 
define dso_local @test_th_vle64ff_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv2f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -141,7 +142,7 @@ vfloat64m2_t test_th_vle64ff_v_f64m2(const float64_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -154,7 +155,7 @@ vfloat64m4_t test_th_vle64ff_v_f64m4(const float64_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle8ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle8ff.c index 349fba5ee5db02..cf48f0b7025035 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle8ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/thead/vle8ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -19,7 +20,7 @@ vint8m1_t test_th_vle8ff_v_i8m1(const int8_t *base, size_t *new_vl, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -32,7 +33,7 @@ vint8m2_t test_th_vle8ff_v_i8m2(const int8_t *base, size_t *new_vl, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) 
// CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -45,7 +46,7 @@ vint8m4_t test_th_vle8ff_v_i8m4(const int8_t *base, size_t *new_vl, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -58,7 +59,7 @@ vint8m8_t test_th_vle8ff_v_i8m8(const int8_t *base, size_t *new_vl, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -71,7 +72,7 @@ vuint8m1_t test_th_vle8ff_v_u8m1(const uint8_t *base, size_t *new_vl, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -84,7 +85,7 @@ vuint8m2_t test_th_vle8ff_v_u8m2(const uint8_t *base, size_t *new_vl, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -97,7 +98,7 @@ vuint8m4_t test_th_vle8ff_v_u8m4(const uint8_t *base, size_t *new_vl, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle16ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle16ff.c index aff66bcfcedbc3..392873e1af8b8a 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle16ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle16ff.c @@ 
-1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -23,7 +24,7 @@ vint16m1_t test_th_vle16ff_v_i16m1(const int16_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -36,7 +37,7 @@ vint16m2_t test_th_vle16ff_v_i16m2(const int16_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -49,7 +50,7 @@ vint16m4_t test_th_vle16ff_v_i16m4(const int16_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -62,7 +63,7 @@ vint16m8_t test_th_vle16ff_v_i16m8(const int16_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -75,7 +76,7 @@ vuint16m1_t test_th_vle16ff_v_u16m1(const uint16_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -88,7 +89,7 @@ vuint16m2_t test_th_vle16ff_v_u16m2(const uint16_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef 
[[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -101,7 +102,7 @@ vuint16m4_t test_th_vle16ff_v_u16m4(const uint16_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -114,7 +115,7 @@ vuint16m8_t test_th_vle16ff_v_u16m8(const uint16_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_f16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -127,7 +128,7 @@ vfloat16m1_t test_th_vle16ff_v_f16m1(const float16_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -140,7 +141,7 @@ vfloat16m2_t test_th_vle16ff_v_f16m2(const float16_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -153,7 +154,7 @@ vfloat16m4_t test_th_vle16ff_v_f16m4(const float16_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle16ff_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv32f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 diff --git 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle32ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle32ff.c index be1e34993ecf30..c915b1a91ee5be 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle32ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle32ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -23,7 +24,7 @@ vint32m1_t test_th_vle32ff_v_i32m1(const int32_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -36,7 +37,7 @@ vint32m2_t test_th_vle32ff_v_i32m2(const int32_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -49,7 +50,7 @@ vint32m4_t test_th_vle32ff_v_i32m4(const int32_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -62,7 +63,7 @@ vint32m8_t test_th_vle32ff_v_i32m8(const int32_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -75,7 +76,7 @@ vuint32m1_t test_th_vle32ff_v_u32m1(const uint32_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -88,7 +89,7 @@ vuint32m2_t test_th_vle32ff_v_u32m2(const uint32_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -101,7 +102,7 @@ vuint32m4_t test_th_vle32ff_v_u32m4(const uint32_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -115,7 +116,7 @@ vuint32m8_t test_th_vle32ff_v_u32m8(const uint32_t *base, size_t *new_vl, size_t // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_f32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv2f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -128,7 +129,7 @@ vfloat32m1_t test_th_vle32ff_v_f32m1(const float32_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -141,7 +142,7 @@ vfloat32m2_t test_th_vle32ff_v_f32m2(const float32_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -154,7 +155,7 @@ vfloat32m4_t test_th_vle32ff_v_f32m4(const float32_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle32ff_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle64ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle64ff.c index 0e3fd4003aa0da..8651d84e86f6ca 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle64ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle64ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -23,7 +24,7 @@ vint64m1_t test_th_vle64ff_v_i64m1(const int64_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -36,7 +37,7 @@ vint64m2_t test_th_vle64ff_v_i64m2(const int64_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -49,7 +50,7 @@ vint64m4_t test_th_vle64ff_v_i64m4(const int64_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -62,7 +63,7 @@ vint64m8_t test_th_vle64ff_v_i64m8(const int64_t *base, size_t *new_vl, size_t v } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -75,7 +76,7 @@ vuint64m1_t 
test_th_vle64ff_v_u64m1(const uint64_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -88,7 +89,7 @@ vuint64m2_t test_th_vle64ff_v_u64m2(const uint64_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -101,7 +102,7 @@ vuint64m4_t test_th_vle64ff_v_u64m4(const uint64_t *base, size_t *new_vl, size_t } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -115,7 +116,7 @@ vuint64m8_t test_th_vle64ff_v_u64m8(const uint64_t *base, size_t *new_vl, size_t // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_f64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv1f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -128,7 +129,7 @@ vfloat64m1_t test_th_vle64ff_v_f64m1(const float64_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv2f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -141,7 +142,7 @@ vfloat64m2_t test_th_vle64ff_v_f64m2(const float64_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv4f64.i64( poison, 
ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -154,7 +155,7 @@ vfloat64m4_t test_th_vle64ff_v_f64m4(const float64_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local @test_th_vle64ff_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle8ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle8ff.c index 23da4244ed16ea..5a941ea1fcbbd9 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle8ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride-ff/wrappers/vle8ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -19,7 +20,7 @@ vint8m1_t test_th_vle8ff_v_i8m1(const int8_t *base, size_t *new_vl, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -32,7 +33,7 @@ vint8m2_t test_th_vle8ff_v_i8m2(const int8_t *base, size_t *new_vl, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -45,7 +46,7 @@ vint8m4_t test_th_vle8ff_v_i8m4(const int8_t *base, size_t *new_vl, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -58,7 +59,7 @@ vint8m8_t test_th_vle8ff_v_i8m8(const int8_t *base, size_t *new_vl, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv8i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -71,7 +72,7 @@ vuint8m1_t test_th_vle8ff_v_u8m1(const uint8_t *base, size_t *new_vl, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -84,7 +85,7 @@ vuint8m2_t test_th_vle8ff_v_u8m2(const uint8_t *base, size_t *new_vl, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 @@ -97,7 +98,7 @@ vuint8m4_t test_th_vle8ff_v_u8m4(const uint8_t *base, size_t *new_vl, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_th_vle8ff_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , i64 } @llvm.riscv.th.vleff.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlb.c index 864afd57f3ce1c..4fdd10a6402757 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlb.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlb.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlb_v_i8m1(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlb_v_i8m2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlb_v_i8m4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlb_v_i8m8(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlb_v_i16m1(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlb_v_i16m2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlb_v_i16m4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlb_v_i16m8(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlb_v_i32m1(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv4i32.i64( poison, ptr 
[[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlb_v_i32m2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlb_v_i32m4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlb_v_i32m8(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlb_v_i64m1(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlb_v_i64m2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlb_v_i64m4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlbu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlbu.c index 20ac6deb429929..a61a29fef8f4a4 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlbu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlbu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature 
+xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlbu_v_u8m1(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlbu_v_u8m2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlbu_v_u8m4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlbu_v_u8m8(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlbu_v_u16m1(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlbu_v_u16m2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlbu_v_u16m4(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ 
vuint16m8_t test_th_vlbu_v_u16m8(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlbu_v_u32m1(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlbu_v_u32m2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlbu_v_u32m4(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlbu_v_u32m8(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlbu_v_u64m1(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlbu_v_u64m2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlbu_v_u64m4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_th_vlbu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle16.c index b5835f1adde755..982f86939e9ff7 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_th_vle16_v_f16m1(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m2_t test_th_vle16_v_f16m2(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m4_t test_th_vle16_v_f16m4(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m8_t test_th_vle16_v_f16m8(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vle16_v_i16m1(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t 
test_th_vle16_v_i16m2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vle16_v_i16m4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vle16_v_i16m8(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_th_vle16_v_u16m1(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m2_t test_th_vle16_v_u16m2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m4_t test_th_vle16_v_u16m4(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle32.c index 12d6dca8ae8f8d..169f8f436ff96d 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm 
%s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1_t test_th_vle32_v_f32m1(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m2_t test_th_vle32_v_f32m2(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat32m4_t test_th_vle32_v_f32m4(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m8_t test_th_vle32_v_f32m8(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint32m1_t test_th_vle32_v_i32m1(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint32m2_t test_th_vle32_v_i32m2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint32m4_t test_th_vle32_v_i32m4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint32m8_t test_th_vle32_v_i32m8(const int32_t 
*base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vle32_v_u32m1(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vle32_v_u32m2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vle32_v_u32m4(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle64.c index a80788fef14108..2cb3b4d18069b9 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat64m1_t test_th_vle64_v_f64m1(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat64m2_t test_th_vle64_v_f64m2(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -36,7 +37,7 @@ vfloat64m4_t test_th_vle64_v_f64m4(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat64m8_t test_th_vle64_v_f64m8(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint64m1_t test_th_vle64_v_i64m1(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint64m2_t test_th_vle64_v_i64m2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint64m4_t test_th_vle64_v_i64m4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint64m8_t test_th_vle64_v_i64m8(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint64m1_t test_th_vle64_v_u64m1(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint64m2_t test_th_vle64_v_u64m2(const uint64_t *base, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint64m4_t test_th_vle64_v_u64m4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle8.c index 669c2755b2cd4e..61c98e9ad9c8b0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vle8_v_i8m1(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vle8_v_i8m2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vle8_v_i8m4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vle8_v_i8m8(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m1_t test_th_vle8_v_u8m1(const 
uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m2_t test_th_vle8_v_u8m2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m4_t test_th_vle8_v_u8m4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlh.c index f1be06d9d1bbec..d0d0e339ec0933 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlh.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlh.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlh_v_i8m1(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlh_v_i8m2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlh_v_i8m4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t 
test_th_vlh_v_i8m8(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlh_v_i16m1(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlh_v_i16m2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlh_v_i16m4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlh_v_i16m8(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlh_v_i32m1(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlh_v_i32m2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlh_v_i32m4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i32m8 -// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlh_v_i32m8(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlh_v_i64m1(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlh_v_i64m2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlh_v_i64m4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlhu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlhu.c index 1bdaad8a043eca..38404f0da54d83 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlhu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlhu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlhu_v_u8m1(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlhu_v_u8m2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_th_vlhu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlhu_v_u8m4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlhu_v_u8m8(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlhu_v_u16m1(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlhu_v_u16m2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlhu_v_u16m4(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_th_vlhu_v_u16m8(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlhu_v_u32m1(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlhu_v_u32m2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlhu_v_u32m4(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlhu_v_u32m8(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlhu_v_u64m1(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlhu_v_u64m2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlhu_v_u64m4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlw.c index 9175ba2a4b2ad1..88363ae3875e5b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlw.c +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlw.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlw_v_i8m1(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlw_v_i8m2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlw_v_i8m4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlw_v_i8m8(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlw_v_i16m1(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlw_v_i16m2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlw_v_i16m4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlw_v_i16m8(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlw_v_i32m1(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlw_v_i32m2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlw_v_i32m4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlw_v_i32m8(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlw_v_i64m1(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlw_v_i64m2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv4i64.i64( 
poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlw_v_i64m4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlwu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlwu.c index 2231c718b61bec..1007f0e65c6bab 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlwu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlwu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlwu_v_u8m1(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlwu_v_u8m2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlwu_v_u8m4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlwu_v_u8m8(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlwu_v_u16m1(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlwu_v_u16m2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlwu_v_u16m4(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_th_vlwu_v_u16m8(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlwu_v_u32m1(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlwu_v_u32m2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlwu_v_u32m4(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlwu_v_u32m8(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlwu_v_u64m1(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlwu_v_u64m2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlwu_v_u64m4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsb.c index 0b3bda3ee2fb87..33394ee70e52dc 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsb.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsb.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsb_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsb_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsb_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] 
{ // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsb_v_i8m8(int8_t *base, vint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsb_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsb_v_i16m2(int16_t *base, vint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsb_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsb_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsb_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsb_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsb_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsb_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsb_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsb_v_i64m2(int64_t *base, vint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsb_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsb_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsb_v_u8m1(uint8_t *base, vuint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsb_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsb_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsb_v_u8m8(uint8_t *base, vuint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsb_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsb_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsb_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsb_v_u16m8(uint16_t *base, vuint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local void @test_th_vsb_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsb_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsb_v_u32m2(uint32_t *base, vuint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsb_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsb_v_u32m8(uint32_t *base, vuint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vsb_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vsb_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void 
test_th_vsb_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse16.c index 43aa311d5b8bad..94f7e5bdfd7b6e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vse16_v_f16m1(_Float16 *base, vfloat16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vse16_v_f16m2(_Float16 *base, vfloat16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vse16_v_f16m4(_Float16 *base, vfloat16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vse16_v_f16m8(_Float16 *base, vfloat16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vse16_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vse16_v_i16m2(int16_t *base, vint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vse16_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vse16_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vse16_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vse16_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vse16_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse32.c index 
823c0fa3c53ac2..10814c9c30523f 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vse32_v_f32m1(float *base, vfloat32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vse32_v_f32m2(float *base, vfloat32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vse32_v_f32m4(float *base, vfloat32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vse32_v_f32m8(float *base, vfloat32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vse32_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vse32_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i32.i64( 
[[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vse32_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vse32_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vse32_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vse32_v_u32m2(uint32_t *base, vuint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vse32_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse64.c index 89941d6c3ed5d0..b78649228f24d1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vse64_v_f64m1(double *base, vfloat64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vse64_v_f64m2(double *base, vfloat64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vse64_v_f64m4(double *base, vfloat64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vse64_v_f64m8(double *base, vfloat64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vse64_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vse64_v_i64m2(int64_t *base, vint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vse64_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vse64_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vse64_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vse64_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vse64_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse8.c index 3011d80a5dbbee..a368fd1a1e3caf 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vse8_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vse8_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vse8_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vse8_v_i8m8(int8_t *base, vint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vse8_v_u8m1(uint8_t *base, vuint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vse8_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vse8_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsh.c index 4fe02dbe87c124..48a3bb08f184e8 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsh.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsh.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsh_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i8m2 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsh_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsh_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsh_v_i8m8(int8_t *base, vint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsh_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsh_v_i16m2(int16_t *base, vint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsh_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsh_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsh_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsh_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsh_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsh_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsh_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsh_v_i64m2(int64_t *base, vint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 
@@ void test_th_vsh_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsh_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsh_v_u8m1(uint8_t *base, vuint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsh_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsh_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsh_v_u8m8(uint8_t *base, vuint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsh_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i16.i64( [[VALUE]], 
ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsh_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsh_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsh_v_u16m8(uint16_t *base, vuint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsh_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsh_v_u32m2(uint32_t *base, vuint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsh_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsh_v_u32m8(uint32_t *base, vuint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vsh_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vsh_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vsh_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsw.c index 811d333db4f0f7..9104f1cf2ecc8b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsw.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsw.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsw_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsw_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsw_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i8m8 
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsw_v_i8m8(int8_t *base, vint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsw_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsw_v_i16m2(int16_t *base, vint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsw_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsw_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsw_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsw_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { 
} // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsw_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsw_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsw_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsw_v_i64m2(int64_t *base, vint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsw_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsw_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 
+177,7 @@ void test_th_vsw_v_u8m1(uint8_t *base, vuint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsw_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsw_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsw_v_u8m8(uint8_t *base, vuint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsw_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsw_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsw_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsw_v_u16m8(uint16_t *base, vuint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsw_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsw_v_u32m2(uint32_t *base, vuint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsw_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsw_v_u32m8(uint32_t *base, vuint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vsw_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vsw_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vsw_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlb.c index 0c245940a48335..dac5b9f49b7ba6 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlb.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlb.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlb_v_i8m1(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlb_v_i8m2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlb_v_i8m4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlb_v_i8m8(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlb_v_i16m1(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlb_v_i16m2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlb_v_i16m4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlb_v_i16m8(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlb_v_i32m1(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlb_v_i32m2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlb_v_i32m4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlb_v_i32m8(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlb_v_i64m1(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlb_v_i64m2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlb_v_i64m4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlb.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlbu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlbu.c index 01b8e6c369a6a6..2154640f9983db 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlbu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlbu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlbu_v_u8m1(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlbu_v_u8m2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlbu_v_u8m4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlbu_v_u8m8(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlbu_v_u16m1(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlbu_v_u16m2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlbu_v_u16m4(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_th_vlbu_v_u16m8(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlbu_v_u32m1(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlbu_v_u32m2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlbu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlbu_v_u32m4(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlbu_v_u32m8(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlbu_v_u64m1(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlbu_v_u64m2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlbu_v_u64m4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlbu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlbu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle16.c index 106a41f4d8dd12..83a0e5ee47e98d 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_th_vle16_v_f16m1(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m2_t test_th_vle16_v_f16m2(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m4_t test_th_vle16_v_f16m4(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m8_t test_th_vle16_v_f16m8(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vle16_v_i16m1(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vle16_v_i16m2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vle16_v_i16m4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vle16_v_i16m8(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vle.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_th_vle16_v_u16m1(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m2_t test_th_vle16_v_u16m2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m4_t test_th_vle16_v_u16m4(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle32.c index 33cc39f67c44b3..51887604cb705d 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1_t test_th_vle32_v_f32m1(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m2_t test_th_vle32_v_f32m2(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat32m4_t test_th_vle32_v_f32m4(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m8_t test_th_vle32_v_f32m8(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint32m1_t test_th_vle32_v_i32m1(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint32m2_t test_th_vle32_v_i32m2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint32m4_t test_th_vle32_v_i32m4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint32m8_t test_th_vle32_v_i32m8(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vle32_v_u32m1(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vle32_v_u32m2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vle.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vle32_v_u32m4(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle64.c index 8d6271362b4c09..1353f1d628dd30 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat64m1_t test_th_vle64_v_f64m1(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat64m2_t test_th_vle64_v_f64m2(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat64m4_t test_th_vle64_v_f64m4(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat64m8_t test_th_vle64_v_f64m8(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint64m1_t test_th_vle64_v_i64m1(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint64m2_t test_th_vle64_v_i64m2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint64m4_t test_th_vle64_v_i64m4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint64m8_t test_th_vle64_v_i64m8(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint64m1_t test_th_vle64_v_u64m1(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint64m2_t test_th_vle64_v_u64m2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint64m4_t test_th_vle64_v_u64m4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle8.c index b3d4fdd635dabd..ab2c483e0c4502 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle8.c 
@@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vle8_v_i8m1(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vle8_v_i8m2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vle8_v_i8m4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vle8_v_i8m8(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m1_t test_th_vle8_v_u8m1(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m2_t test_th_vle8_v_u8m2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m4_t test_th_vle8_v_u8m4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlh.c index 4f35bf37532f7f..66ded59357c9f3 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlh.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlh.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlh_v_i8m1(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlh_v_i8m2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlh_v_i8m4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlh_v_i8m8(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlh_v_i16m1(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlh_v_i16m2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlh_v_i16m4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlh_v_i16m8(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlh_v_i32m1(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlh_v_i32m2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlh_v_i32m4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlh_v_i32m8(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlh_v_i64m1(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: 
ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlh_v_i64m2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlh_v_i64m4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlh.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlhu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlhu.c index 1427896ef00983..8679d4c98d3e08 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlhu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlhu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlhu_v_u8m1(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlhu_v_u8m2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlhu_v_u8m4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlhu_v_u8m8(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv4i16.i64( 
poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlhu_v_u16m1(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlhu_v_u16m2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlhu_v_u16m4(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_th_vlhu_v_u16m8(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlhu_v_u32m1(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlhu_v_u32m2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlhu_v_u32m4(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t 
test_th_vlhu_v_u32m8(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlhu_v_u64m1(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlhu_v_u64m2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlhu_v_u64m4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlhu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlhu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlw.c index f26dbb5d41060b..b7e82f365adb2b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlw.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlw.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_th_vlw_v_i8m1(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_th_vlw_v_i8m2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_th_vlw_v_i8m4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_th_vlw_v_i8m8(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_th_vlw_v_i16m1(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_th_vlw_v_i16m2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_th_vlw_v_i16m4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_th_vlw_v_i16m8(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_th_vlw_v_i32m1(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_th_vlw_v_i32m2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_th_vlw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_th_vlw_v_i32m4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_th_vlw_v_i32m8(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_th_vlw_v_i64m1(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_th_vlw_v_i64m2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_th_vlw_v_i64m4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlw.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlwu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlwu.c index 790cc2d074444d..4ec7f62b15e8da 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlwu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlwu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_th_vlwu_v_u8m1(const 
uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_th_vlwu_v_u8m2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_th_vlwu_v_u8m4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_th_vlwu_v_u8m8(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_th_vlwu_v_u16m1(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_th_vlwu_v_u16m2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_th_vlwu_v_u16m4(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_th_vlwu_v_u16m8(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u32m1 -// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_th_vlwu_v_u32m1(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_th_vlwu_v_u32m2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_th_vlwu_v_u32m4(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_th_vlwu_v_u32m8(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_th_vlwu_v_u64m1(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_th_vlwu_v_u64m2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_th_vlwu_v_u64m4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vlwu_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlwu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsb.c index 10aa7248b8f0db..cfa4376256a36e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsb.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsb.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsb_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsb_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsb_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsb_v_i8m8(int8_t *base, vint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsb_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsb_v_i16m2(int16_t *base, vint16m2_t value, size_t 
vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsb_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsb_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsb_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsb_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsb_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsb_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 
+137,7 @@ void test_th_vsb_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsb_v_i64m2(int64_t *base, vint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsb_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsb_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsb_v_u8m1(uint8_t *base, vuint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsb_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsb_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv64i8.i64( 
[[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsb_v_u8m8(uint8_t *base, vuint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsb_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsb_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsb_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsb_v_u16m8(uint16_t *base, vuint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsb_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsb_v_u32m2(uint32_t *base, vuint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] 
{ // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsb_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsb_v_u32m8(uint32_t *base, vuint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vsb_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vsb_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vsb_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsb_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse16.c index db53818cba24c3..60391d3289bbcf 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vse16_v_f16m1(_Float16 *base, vfloat16m1_t value, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vse16_v_f16m2(_Float16 *base, vfloat16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vse16_v_f16m4(_Float16 *base, vfloat16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vse16_v_f16m8(_Float16 *base, vfloat16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vse16_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vse16_v_i16m2(int16_t *base, vint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vse16_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: 
ret void @@ -86,7 +87,7 @@ void test_th_vse16_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vse16_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vse16_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vse16_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse32.c index f579072948b781..873dd693eee72d 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vse32_v_f32m1(float *base, vfloat32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vse32_v_f32m2(float *base, vfloat32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vse32_v_f32m4(float *base, vfloat32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vse32_v_f32m8(float *base, vfloat32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vse32_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vse32_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vse32_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vse32_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vse32_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void 
@test_th_vse32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vse32_v_u32m2(uint32_t *base, vuint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vse32_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse64.c index 6905618b2932ec..9a1b18b60f24eb 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vse64_v_f64m1(double *base, vfloat64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vse64_v_f64m2(double *base, vfloat64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vse64_v_f64m4(double *base, vfloat64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
// CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vse64_v_f64m8(double *base, vfloat64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vse64_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vse64_v_i64m2(int64_t *base, vint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vse64_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vse64_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vse64_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vse64_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vse64_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse8.c index 788320f08293b6..92f9d3aa9f3479 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vse8_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vse8_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vse8_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vse8_v_i8m8(int8_t *base, vint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vse8_v_u8m1(uint8_t *base, 
vuint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vse8_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vse8_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsh.c index 73d2d68451069a..294fc02ba592f1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsh.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsh.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsh_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsh_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsh_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsh_v_i8m8(int8_t *base, vint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsh_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsh_v_i16m2(int16_t *base, vint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsh_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsh_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsh_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsh_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsh_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsh_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsh_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsh_v_i64m2(int64_t *base, vint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsh_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsh_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsh_v_u8m1(uint8_t *base, vuint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsh_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsh_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsh_v_u8m8(uint8_t *base, vuint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsh_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsh_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsh_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsh_v_u16m8(uint16_t *base, vuint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void 
@test_th_vsh_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsh_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsh_v_u32m2(uint32_t *base, vuint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsh_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsh_v_u32m8(uint32_t *base, vuint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vsh_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vsh_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void 
test_th_vsh_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsh_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsw.c index 823fc82a29ded4..39ddbe5b91b4a6 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsw.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsw.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ void test_th_vsw_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +27,7 @@ void test_th_vsw_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +37,7 @@ void test_th_vsw_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +47,7 @@ void test_th_vsw_v_i8m8(int8_t *base, vint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +57,7 @@ void test_th_vsw_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +67,7 @@ void test_th_vsw_v_i16m2(int16_t *base, vint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +77,7 @@ void test_th_vsw_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +87,7 @@ void test_th_vsw_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +97,7 @@ void test_th_vsw_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +107,7 @@ void test_th_vsw_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +117,7 @@ void test_th_vsw_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -126,7 +127,7 @@ void test_th_vsw_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -136,7 +137,7 @@ void test_th_vsw_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -146,7 +147,7 @@ void test_th_vsw_v_i64m2(int64_t *base, vint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -156,7 +157,7 @@ void test_th_vsw_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -166,7 +167,7 @@ void test_th_vsw_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -176,7 +177,7 @@ void test_th_vsw_v_u8m1(uint8_t *base, vuint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -186,7 +187,7 @@ void test_th_vsw_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -196,7 +197,7 @@ void test_th_vsw_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u8m8 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -206,7 +207,7 @@ void test_th_vsw_v_u8m8(uint8_t *base, vuint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -216,7 +217,7 @@ void test_th_vsw_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -226,7 +227,7 @@ void test_th_vsw_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -236,7 +237,7 @@ void test_th_vsw_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -246,7 +247,7 @@ void test_th_vsw_v_u16m8(uint16_t *base, vuint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -256,7 +257,7 @@ void test_th_vsw_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -266,7 +267,7 @@ void test_th_vsw_v_u32m2(uint32_t *base, 
vuint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -276,7 +277,7 @@ void test_th_vsw_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -286,7 +287,7 @@ void test_th_vsw_v_u32m8(uint32_t *base, vuint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -296,7 +297,7 @@ void test_th_vsw_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -306,7 +307,7 @@ void test_th_vsw_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -316,7 +317,7 @@ void test_th_vsw_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vsw_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vadc.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vadc.c index c6040c7b57140d..38df3771aa46dc 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vadc.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vadc.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been 
autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vadc_vvm_i8m1(vint8m1_t op1, vint8m1_t op2, vbool8_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vadc_vxm_i8m1(vint8m1_t op1, int8_t op2, vbool8_t carryin, size_t } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vadc_vvm_i8m2(vint8m2_t op1, vint8m2_t op2, vbool4_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vadc_vxm_i8m2(vint8m2_t op1, int8_t op2, vbool4_t carryin, size_t } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vadc_vvm_i8m4(vint8m4_t op1, vint8m4_t op2, vbool2_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vadc_vxm_i8m4(vint8m4_t op1, int8_t op2, vbool2_t carryin, size_t } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vadc_vvm_i8m8(vint8m8_t op1, vint8m8_t op2, vbool1_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vadc_vxm_i8m8(vint8m8_t op1, int8_t op2, vbool1_t carryin, size_t } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vadc_vvm_i16m1(vint16m1_t op1, vint16m1_t op2, vbool16_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vadc_vxm_i16m1(vint16m1_t op1, int16_t op2, vbool16_t carryin, s } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vadc_vvm_i16m2(vint16m2_t op1, vint16m2_t op2, vbool8_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vadc_vxm_i16m2(vint16m2_t op1, int16_t op2, vbool8_t carryin, si } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vadc.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vadc_vvm_i16m4(vint16m4_t op1, vint16m4_t op2, vbool4_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vadc_vxm_i16m4(vint16m4_t op1, int16_t op2, vbool4_t carryin, si } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vadc_vvm_i16m8(vint16m8_t op1, vint16m8_t op2, vbool2_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vadc_vxm_i16m8(vint16m8_t op1, int16_t op2, vbool2_t carryin, si } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vadc_vvm_i32m1(vint32m1_t op1, vint32m1_t op2, vbool32_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vadc_vxm_i32m1(vint32m1_t op1, int32_t op2, vbool32_t carryin, s } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vadc.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vadc_vvm_i32m2(vint32m2_t op1, vint32m2_t op2, vbool16_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vadc_vxm_i32m2(vint32m2_t op1, int32_t op2, vbool16_t carryin, s } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vadc_vvm_i32m4(vint32m4_t op1, vint32m4_t op2, vbool8_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vadc_vxm_i32m4(vint32m4_t op1, int32_t op2, vbool8_t carryin, si } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vadc_vvm_i32m8(vint32m8_t op1, vint32m8_t op2, vbool4_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vadc_vxm_i32m8(vint32m8_t op1, int32_t op2, vbool4_t carryin, si } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vadc.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vadc_vvm_i64m1(vint64m1_t op1, vint64m1_t op2, vbool64_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vadc_vxm_i64m1(vint64m1_t op1, int64_t op2, vbool64_t carryin, s } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vadc_vvm_i64m2(vint64m2_t op1, vint64m2_t op2, vbool32_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vadc_vxm_i64m2(vint64m2_t op1, int64_t op2, vbool32_t carryin, s } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vadc_vvm_i64m4(vint64m4_t op1, vint64m4_t op2, vbool16_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vadc_vxm_i64m4(vint64m4_t op1, int64_t op2, vbool16_t carryin, s } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], 
i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vadc_vvm_i64m8(vint64m8_t op1, vint64m8_t op2, vbool8_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vadc_vxm_i64m8(vint64m8_t op1, int64_t op2, vbool8_t carryin, si } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vadc_vvm_u8m1(vuint8m1_t op1, vuint8m1_t op2, vbool8_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vadc_vxm_u8m1(vuint8m1_t op1, uint8_t op2, vbool8_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vadc_vvm_u8m2(vuint8m2_t op1, vuint8m2_t op2, vbool4_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vadc_vxm_u8m2(vuint8m2_t op1, uint8_t op2, vbool4_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ 
vuint8m4_t test_vadc_vvm_u8m4(vuint8m4_t op1, vuint8m4_t op2, vbool2_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vadc_vxm_u8m4(vuint8m4_t op1, uint8_t op2, vbool2_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vadc_vvm_u8m8(vuint8m8_t op1, vuint8m8_t op2, vbool1_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vadc_vxm_u8m8(vuint8m8_t op1, uint8_t op2, vbool1_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vadc_vvm_u16m1(vuint16m1_t op1, vuint16m1_t op2, vbool16_t carr } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vadc_vxm_u16m1(vuint16m1_t op1, uint16_t op2, vbool16_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vadc_vvm_u16m2(vuint16m2_t 
op1, vuint16m2_t op2, vbool8_t carry } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vadc_vxm_u16m2(vuint16m2_t op1, uint16_t op2, vbool8_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vadc_vvm_u16m4(vuint16m4_t op1, vuint16m4_t op2, vbool4_t carry } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vadc_vxm_u16m4(vuint16m4_t op1, uint16_t op2, vbool4_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vadc_vvm_u16m8(vuint16m8_t op1, vuint16m8_t op2, vbool2_t carry } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vadc_vxm_u16m8(vuint16m8_t op1, uint16_t op2, vbool2_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vadc_vvm_u32m1(vuint32m1_t op1, vuint32m1_t op2, 
vbool32_t carr } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vadc_vxm_u32m1(vuint32m1_t op1, uint32_t op2, vbool32_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vadc_vvm_u32m2(vuint32m2_t op1, vuint32m2_t op2, vbool16_t carr } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vadc_vxm_u32m2(vuint32m2_t op1, uint32_t op2, vbool16_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vadc_vvm_u32m4(vuint32m4_t op1, vuint32m4_t op2, vbool8_t carry } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vadc_vxm_u32m4(vuint32m4_t op1, uint32_t op2, vbool8_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vadc_vvm_u32m8(vuint32m8_t op1, vuint32m8_t op2, vbool4_t carry } // 
CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vadc_vxm_u32m8(vuint32m8_t op1, uint32_t op2, vbool4_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vadc_vvm_u64m1(vuint64m1_t op1, vuint64m1_t op2, vbool64_t carr } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vadc_vxm_u64m1(vuint64m1_t op1, uint64_t op2, vbool64_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vadc_vvm_u64m2(vuint64m2_t op1, vuint64m2_t op2, vbool32_t carr } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vadc_vxm_u64m2(vuint64m2_t op1, uint64_t op2, vbool32_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vadc_vvm_u64m4(vuint64m4_t op1, vuint64m4_t op2, vbool16_t carr } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u64m4 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vadc_vxm_u64m4(vuint64m4_t op1, uint64_t op2, vbool16_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vadc_vvm_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vadc_vvm_u64m8(vuint64m8_t op1, vuint64m8_t op2, vbool8_t carry } // CHECK-RV64-LABEL: define dso_local @test_vadc_vxm_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadc.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vmadc.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vmadc.c index 308f74af1e1464..2c6a243f3fc353 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vmadc.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vmadc.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmadc_vvm_i8m1_b8(vint8m1_t op1, vint8m1_t op2, vbool8_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmadc_vxm_i8m1_b8(vint8m1_t op1, int8_t op2, vbool8_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmadc_vvm_i8m2_b4(vint8m2_t op1, vint8m2_t op2, vbool4_t 
carryin, } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmadc_vxm_i8m2_b4(vint8m2_t op1, int8_t op2, vbool4_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmadc_vvm_i8m4_b2(vint8m4_t op1, vint8m4_t op2, vbool2_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmadc_vxm_i8m4_b2(vint8m4_t op1, int8_t op2, vbool2_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmadc_vvm_i8m8_b1(vint8m8_t op1, vint8m8_t op2, vbool1_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmadc_vxm_i8m8_b1(vint8m8_t op1, int8_t op2, vbool1_t carryin, siz } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmadc_vvm_i16m1_b16(vint16m1_t op1, vint16m1_t op2, vbool16_t car } // 
CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmadc_vxm_i16m1_b16(vint16m1_t op1, int16_t op2, vbool16_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmadc_vvm_i16m2_b8(vint16m2_t op1, vint16m2_t op2, vbool8_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmadc_vxm_i16m2_b8(vint16m2_t op1, int16_t op2, vbool8_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmadc_vvm_i16m4_b4(vint16m4_t op1, vint16m4_t op2, vbool4_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmadc_vxm_i16m4_b4(vint16m4_t op1, int16_t op2, vbool4_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmadc_vvm_i16m8_b2(vint16m8_t op1, vint16m8_t op2, vbool2_t 
carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmadc_vxm_i16m8_b2(vint16m8_t op1, int16_t op2, vbool2_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmadc_vvm_i32m1_b32(vint32m1_t op1, vint32m1_t op2, vbool32_t car } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmadc_vxm_i32m1_b32(vint32m1_t op1, int32_t op2, vbool32_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmadc_vvm_i32m2_b16(vint32m2_t op1, vint32m2_t op2, vbool16_t car } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmadc_vxm_i32m2_b16(vint32m2_t op1, int32_t op2, vbool16_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmadc_vvm_i32m4_b8(vint32m4_t op1, vint32m4_t op2, 
vbool8_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmadc_vxm_i32m4_b8(vint32m4_t op1, int32_t op2, vbool8_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmadc_vvm_i32m8_b4(vint32m8_t op1, vint32m8_t op2, vbool4_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmadc_vxm_i32m8_b4(vint32m8_t op1, int32_t op2, vbool4_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmadc_vvm_i64m1_b64(vint64m1_t op1, vint64m1_t op2, vbool64_t car } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmadc_vxm_i64m1_b64(vint64m1_t op1, int64_t op2, vbool64_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmadc_vvm_i64m2_b32(vint64m2_t op1, vint64m2_t op2, 
vbool32_t car } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmadc_vxm_i64m2_b32(vint64m2_t op1, int64_t op2, vbool32_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmadc_vvm_i64m4_b16(vint64m4_t op1, vint64m4_t op2, vbool16_t car } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmadc_vxm_i64m4_b16(vint64m4_t op1, int64_t op2, vbool16_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmadc_vvm_i64m8_b8(vint64m8_t op1, vint64m8_t op2, vbool8_t carryi } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmadc_vxm_i64m8_b8(vint64m8_t op1, int64_t op2, vbool8_t carryin, } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmadc_vvm_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, vbool8_t carryin } // CHECK-RV64-LABEL: define 
dso_local @test_vmadc_vxm_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmadc_vxm_u8m1_b8(vuint8m1_t op1, uint8_t op2, vbool8_t carryin, s } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmadc_vvm_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, vbool4_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmadc_vxm_u8m2_b4(vuint8m2_t op1, uint8_t op2, vbool4_t carryin, s } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmadc_vvm_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, vbool2_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmadc_vxm_u8m4_b2(vuint8m4_t op1, uint8_t op2, vbool2_t carryin, s } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmadc_vvm_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, vbool1_t carryin } // CHECK-RV64-LABEL: define dso_local 
@test_vmadc_vxm_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmadc_vxm_u8m8_b1(vuint8m8_t op1, uint8_t op2, vbool1_t carryin, s } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmadc_vvm_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, vbool16_t c } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmadc_vxm_u16m1_b16(vuint16m1_t op1, uint16_t op2, vbool16_t carr } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmadc_vvm_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, vbool8_t carr } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmadc_vxm_u16m2_b8(vuint16m2_t op1, uint16_t op2, vbool8_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmadc_vvm_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, vbool4_t carr } // CHECK-RV64-LABEL: define 
dso_local @test_vmadc_vxm_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmadc_vxm_u16m4_b4(vuint16m4_t op1, uint16_t op2, vbool4_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmadc_vvm_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, vbool2_t carr } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmadc_vxm_u16m8_b2(vuint16m8_t op1, uint16_t op2, vbool2_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmadc_vvm_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, vbool32_t c } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmadc_vxm_u32m1_b32(vuint32m1_t op1, uint32_t op2, vbool32_t carr } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmadc_vvm_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, vbool16_t c } // 
CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmadc_vxm_u32m2_b16(vuint32m2_t op1, uint32_t op2, vbool16_t carr } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmadc_vvm_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, vbool8_t carr } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmadc_vxm_u32m4_b8(vuint32m4_t op1, uint32_t op2, vbool8_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmadc_vvm_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, vbool4_t carr } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmadc_vxm_u32m8_b4(vuint32m8_t op1, uint32_t op2, vbool4_t carryin } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmadc_vvm_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, 
vbool64_t c } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmadc_vxm_u64m1_b64(vuint64m1_t op1, uint64_t op2, vbool64_t carr } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmadc_vvm_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, vbool32_t c } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmadc_vxm_u64m2_b32(vuint64m2_t op1, uint64_t op2, vbool32_t carr } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmadc_vvm_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, vbool16_t c } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vxm_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmadc_vxm_u64m4_b16(vuint64m4_t op1, uint64_t op2, vbool16_t carr } // CHECK-RV64-LABEL: define dso_local @test_vmadc_vvm_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmadc_vvm_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, vbool8_t carr } // CHECK-RV64-LABEL: 
define dso_local @test_vmadc_vxm_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[CARRYIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadc.carry.in.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], [[CARRYIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vmsbc.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vmsbc.c index f16a4d3e01023f..010c223931d111 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vmsbc.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vmsbc.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmsbc_vvm_i8m1_b8(vint8m1_t op1, vint8m1_t op2, vbool8_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsbc_vxm_i8m1_b8(vint8m1_t op1, int8_t op2, vbool8_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsbc_vvm_i8m2_b4(vint8m2_t op1, vint8m2_t op2, vbool4_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsbc_vxm_i8m2_b4(vint8m2_t op1, int8_t op2, vbool4_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-56,7 +57,7 @@ vbool2_t test_vmsbc_vvm_i8m4_b2(vint8m4_t op1, vint8m4_t op2, vbool2_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsbc_vxm_i8m4_b2(vint8m4_t op1, int8_t op2, vbool2_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsbc_vvm_i8m8_b1(vint8m8_t op1, vint8m8_t op2, vbool1_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsbc_vxm_i8m8_b1(vint8m8_t op1, int8_t op2, vbool1_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsbc_vvm_i16m1_b16(vint16m1_t op1, vint16m1_t op2, vbool16_t bor } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsbc_vxm_i16m1_b16(vint16m1_t op1, int16_t op2, vbool16_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsbc_vvm_i16m2_b8(vint16m2_t op1, vint16m2_t op2, vbool8_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsbc_vxm_i16m2_b8(vint16m2_t op1, int16_t op2, vbool8_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsbc_vvm_i16m4_b4(vint16m4_t op1, vint16m4_t op2, vbool4_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsbc_vxm_i16m4_b4(vint16m4_t op1, int16_t op2, vbool4_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsbc_vvm_i16m8_b2(vint16m8_t op1, vint16m8_t op2, vbool2_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsbc_vxm_i16m8_b2(vint16m8_t op1, int16_t op2, vbool2_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], [[BORROWIN]], 
i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsbc_vvm_i32m1_b32(vint32m1_t op1, vint32m1_t op2, vbool32_t bor } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsbc_vxm_i32m1_b32(vint32m1_t op1, int32_t op2, vbool32_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsbc_vvm_i32m2_b16(vint32m2_t op1, vint32m2_t op2, vbool16_t bor } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsbc_vxm_i32m2_b16(vint32m2_t op1, int32_t op2, vbool16_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsbc_vvm_i32m4_b8(vint32m4_t op1, vint32m4_t op2, vbool8_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsbc_vxm_i32m4_b8(vint32m4_t op1, int32_t op2, vbool8_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i32.nxv16i32.i64( 
[[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsbc_vvm_i32m8_b4(vint32m8_t op1, vint32m8_t op2, vbool4_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsbc_vxm_i32m8_b4(vint32m8_t op1, int32_t op2, vbool4_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsbc_vvm_i64m1_b64(vint64m1_t op1, vint64m1_t op2, vbool64_t bor } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsbc_vxm_i64m1_b64(vint64m1_t op1, int64_t op2, vbool64_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsbc_vvm_i64m2_b32(vint64m2_t op1, vint64m2_t op2, vbool32_t bor } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsbc_vxm_i64m2_b32(vint64m2_t op1, int64_t op2, vbool32_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsbc.borrow.in.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsbc_vvm_i64m4_b16(vint64m4_t op1, vint64m4_t op2, vbool16_t bor } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsbc_vxm_i64m4_b16(vint64m4_t op1, int64_t op2, vbool16_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsbc_vvm_i64m8_b8(vint64m8_t op1, vint64m8_t op2, vbool8_t borrow } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsbc_vxm_i64m8_b8(vint64m8_t op1, int64_t op2, vbool8_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsbc_vvm_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, vbool8_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsbc_vxm_u8m1_b8(vuint8m1_t op1, uint8_t op2, vbool8_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsbc.borrow.in.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsbc_vvm_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, vbool4_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsbc_vxm_u8m2_b4(vuint8m2_t op1, uint8_t op2, vbool4_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsbc_vvm_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, vbool2_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsbc_vxm_u8m4_b2(vuint8m4_t op1, uint8_t op2, vbool2_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsbc_vvm_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, vbool1_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsbc_vxm_u8m8_b1(vuint8m8_t op1, uint8_t op2, vbool1_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsbc_vvm_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, vbool16_t b } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsbc_vxm_u16m1_b16(vuint16m1_t op1, uint16_t op2, vbool16_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsbc_vvm_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, vbool8_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsbc_vxm_u16m2_b8(vuint16m2_t op1, uint16_t op2, vbool8_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsbc_vvm_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, vbool4_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsbc_vxm_u16m4_b4(vuint16m4_t op1, uint16_t op2, vbool4_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsbc_vvm_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, vbool2_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsbc_vxm_u16m8_b2(vuint16m8_t op1, uint16_t op2, vbool2_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsbc_vvm_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, vbool32_t b } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsbc_vxm_u32m1_b32(vuint32m1_t op1, uint32_t op2, vbool32_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsbc_vvm_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, vbool16_t b } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsbc_vxm_u32m2_b16(vuint32m2_t op1, uint32_t op2, vbool16_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsbc_vvm_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, vbool8_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsbc_vxm_u32m4_b8(vuint32m4_t op1, uint32_t op2, vbool8_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsbc_vvm_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, vbool4_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsbc_vxm_u32m8_b4(vuint32m8_t op1, uint32_t op2, vbool4_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsbc_vvm_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, vbool64_t b } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsbc_vxm_u64m1_b64(vuint64m1_t op1, uint64_t op2, vbool64_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], 
[[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsbc_vvm_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, vbool32_t b } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsbc_vxm_u64m2_b32(vuint64m2_t op1, uint64_t op2, vbool32_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsbc_vvm_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, vbool16_t b } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsbc_vxm_u64m4_b16(vuint64m4_t op1, uint64_t op2, vbool16_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vvm_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsbc_vvm_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, vbool8_t borr } // CHECK-RV64-LABEL: define dso_local @test_vmsbc_vxm_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbc.borrow.in.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vsbc.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vsbc.c index ef8e44843fe196..4d6300d900ad61 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vsbc.c +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-add-with-carry/thead/vsbc.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vsbc_vvm_i8m1(vint8m1_t op1, vint8m1_t op2, vbool8_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vsbc_vxm_i8m1(vint8m1_t op1, int8_t op2, vbool8_t borrowin, size_ } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vsbc_vvm_i8m2(vint8m2_t op1, vint8m2_t op2, vbool4_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vsbc_vxm_i8m2(vint8m2_t op1, int8_t op2, vbool4_t borrowin, size_ } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vsbc_vvm_i8m4(vint8m4_t op1, vint8m4_t op2, vbool2_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vsbc_vxm_i8m4(vint8m4_t op1, int8_t op2, vbool2_t borrowin, size_ } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vsbc_vvm_i8m8(vint8m8_t op1, vint8m8_t op2, vbool1_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vsbc_vxm_i8m8(vint8m8_t op1, int8_t op2, vbool1_t borrowin, size_ } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vsbc_vvm_i16m1(vint16m1_t op1, vint16m1_t op2, vbool16_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vsbc_vxm_i16m1(vint16m1_t op1, int16_t op2, vbool16_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vsbc_vvm_i16m2(vint16m2_t op1, vint16m2_t op2, vbool8_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vsbc_vxm_i16m2(vint16m2_t op1, int16_t op2, vbool8_t borrowin, s } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vsbc_vvm_i16m4(vint16m4_t op1, vint16m4_t op2, vbool4_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vsbc_vxm_i16m4(vint16m4_t op1, int16_t op2, vbool4_t borrowin, s } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vsbc_vvm_i16m8(vint16m8_t op1, vint16m8_t op2, vbool2_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vsbc_vxm_i16m8(vint16m8_t op1, int16_t op2, vbool2_t borrowin, s } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vsbc_vvm_i32m1(vint32m1_t op1, vint32m1_t op2, vbool32_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vsbc_vxm_i32m1(vint32m1_t op1, int32_t op2, vbool32_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vsbc_vvm_i32m2(vint32m2_t op1, vint32m2_t op2, vbool16_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vsbc_vxm_i32m2(vint32m2_t op1, int32_t op2, vbool16_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vsbc_vvm_i32m4(vint32m4_t op1, vint32m4_t op2, vbool8_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vsbc_vxm_i32m4(vint32m4_t op1, int32_t op2, vbool8_t borrowin, s } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vsbc_vvm_i32m8(vint32m8_t op1, vint32m8_t op2, vbool4_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vsbc_vxm_i32m8(vint32m8_t op1, int32_t op2, vbool4_t borrowin, s } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vsbc_vvm_i64m1(vint64m1_t op1, vint64m1_t op2, vbool64_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vsbc_vxm_i64m1(vint64m1_t op1, int64_t op2, vbool64_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vsbc_vvm_i64m2(vint64m2_t op1, vint64m2_t op2, vbool32_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vsbc_vxm_i64m2(vint64m2_t op1, int64_t op2, vbool32_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vsbc_vvm_i64m4(vint64m4_t op1, vint64m4_t op2, vbool16_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vsbc_vxm_i64m4(vint64m4_t op1, int64_t op2, vbool16_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vsbc_vvm_i64m8(vint64m8_t op1, vint64m8_t op2, vbool8_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vsbc_vxm_i64m8(vint64m8_t op1, int64_t op2, vbool8_t borrowin, s } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vsbc_vvm_u8m1(vuint8m1_t op1, vuint8m1_t op2, vbool8_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vsbc_vxm_u8m1(vuint8m1_t op1, uint8_t op2, vbool8_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vsbc_vvm_u8m2(vuint8m2_t op1, vuint8m2_t op2, vbool4_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vsbc_vxm_u8m2(vuint8m2_t op1, uint8_t op2, vbool4_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vsbc_vvm_u8m4(vuint8m4_t op1, vuint8m4_t op2, vbool2_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vsbc_vxm_u8m4(vuint8m4_t op1, uint8_t op2, vbool2_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vsbc_vvm_u8m8(vuint8m8_t op1, vuint8m8_t op2, vbool1_t borrowin, } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vsbc_vxm_u8m8(vuint8m8_t op1, uint8_t op2, vbool1_t borrowin, si } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vsbc_vvm_u16m1(vuint16m1_t op1, vuint16m1_t op2, vbool16_t borr } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vsbc_vxm_u16m1(vuint16m1_t op1, uint16_t op2, vbool16_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vsbc_vvm_u16m2(vuint16m2_t op1, vuint16m2_t op2, vbool8_t borro } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vsbc_vxm_u16m2(vuint16m2_t op1, uint16_t op2, vbool8_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vsbc_vvm_u16m4(vuint16m4_t op1, vuint16m4_t op2, vbool4_t borro } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vsbc_vxm_u16m4(vuint16m4_t op1, uint16_t op2, vbool4_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vsbc_vvm_u16m8(vuint16m8_t op1, vuint16m8_t op2, vbool2_t borro } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vsbc_vxm_u16m8(vuint16m8_t op1, uint16_t op2, vbool2_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vsbc_vvm_u32m1(vuint32m1_t op1, vuint32m1_t op2, vbool32_t borr } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vsbc_vxm_u32m1(vuint32m1_t op1, uint32_t op2, vbool32_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vsbc_vvm_u32m2(vuint32m2_t op1, vuint32m2_t op2, vbool16_t borr } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vsbc_vxm_u32m2(vuint32m2_t op1, uint32_t op2, vbool16_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vsbc_vvm_u32m4(vuint32m4_t op1, vuint32m4_t op2, vbool8_t borro } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vsbc_vxm_u32m4(vuint32m4_t op1, uint32_t op2, vbool8_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vsbc_vvm_u32m8(vuint32m8_t op1, vuint32m8_t op2, vbool4_t borro } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vsbc_vxm_u32m8(vuint32m8_t op1, uint32_t op2, vbool4_t borrowin } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vsbc_vvm_u64m1(vuint64m1_t op1, vuint64m1_t op2, vbool64_t borr } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vsbc_vxm_u64m1(vuint64m1_t op1, uint64_t op2, vbool64_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vsbc_vvm_u64m2(vuint64m2_t op1, vuint64m2_t op2, vbool32_t borr } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vsbc_vxm_u64m2(vuint64m2_t op1, uint64_t op2, vbool32_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vsbc.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vsbc_vvm_u64m4(vuint64m4_t op1, vuint64m4_t op2, vbool16_t borr } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vsbc_vxm_u64m4(vuint64m4_t op1, uint64_t op2, vbool16_t borrowi } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vvm_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vsbc_vvm_u64m8(vuint64m8_t op1, vuint64m8_t op2, vbool8_t borro } // CHECK-RV64-LABEL: define dso_local @test_vsbc_vxm_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[BORROWIN:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsbc.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[BORROWIN]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vand.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vand.c index eccf3fd783aec6..5c8bb92a2b2113 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vand.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vand.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vand_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vand_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vand.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vand_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vand_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vand_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vand_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vand_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vand_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vand_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 
noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vand_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vand_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vand_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vand_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vand_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vand_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vand_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: 
define dso_local @test_vand_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vand_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vand_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vand_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vand_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vand_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vand_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) 
// CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vand_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vand_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vand_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vand_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vand_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vand_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vand_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vand_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vand_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vand_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vand_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vand_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vand_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vand_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vand_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vand_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vand_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vand_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vand_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vand_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vand_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, 
size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vand_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vand_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vand_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vand_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vand_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vand_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vand_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vand_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vand_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vand_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vand_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vand_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vand_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vand_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vand_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vand_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vand_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vand_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vand_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vand_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, 
size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vnot.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vnot.c index 3d30b74e962094..627f89184546f1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vnot.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vnot.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vnot_v_i8m1(vint8m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_vnot_v_i8m2(vint8m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_vnot_v_i8m4(vint8m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_vnot_v_i8m8(vint8m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vnot_v_i16m1(vint16m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_vnot_v_i16m2(vint16m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: 
define dso_local @test_vnot_v_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_vnot_v_i16m4(vint16m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_vnot_v_i16m8(vint16m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vnot_v_i32m1(vint32m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_vnot_v_i32m2(vint32m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_vnot_v_i32m4(vint32m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_vnot_v_i32m8(vint32m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vnot_v_i64m1(vint64m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.i64.i64( poison, [[OP1]], 
i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_vnot_v_i64m2(vint64m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_vnot_v_i64m4(vint64m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m8_t test_vnot_v_i64m8(vint64m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vnot_v_u8m1(vuint8m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m2_t test_vnot_v_u8m2(vuint8m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m4_t test_vnot_v_u8m4(vuint8m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m8_t test_vnot_v_u8m8(vuint8m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vnot_v_u16m1(vuint16m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m2_t test_vnot_v_u16m2(vuint16m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m4_t test_vnot_v_u16m4(vuint16m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m8_t test_vnot_v_u16m8(vuint16m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vnot_v_u32m1(vuint32m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m2_t test_vnot_v_u32m2(vuint32m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m4_t test_vnot_v_u32m4(vuint32m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m8_t test_vnot_v_u32m8(vuint32m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vnot_v_u64m1(vuint64m1_t op1, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vnot_v_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_vnot_v_u64m2(vuint64m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m4_t test_vnot_v_u64m4(vuint64m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vor.c index 7005865a9fa381..a97324ba86e9fb 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vor_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vor_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vor_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vor_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vor_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vor_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vor_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vor_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vor_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vor_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vor_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vor_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vor_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vor_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vor_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vor_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vor_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vor_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vor_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vor_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vor_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vor_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vor_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vor_vx_i32m8(vint32m8_t op1, int32_t 
op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vor_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vor_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vor_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vor_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vor_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vor_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vor_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vor_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vor_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vor_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vor_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vor_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vor_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vor_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vor_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vor_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vor_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vor_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vor_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vor_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vor_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vor_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vor_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vor_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vor_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vor_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vor_vv_u32m2(vuint32m2_t op1, 
vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vor_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vor_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vor_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vor_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vor_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vor_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vor.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vor_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vor_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vor_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vor_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vor_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vor_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vxor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vxor.c index 8e17764a2ca90a..6c188e5b0c856f 100644 --- 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vxor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/thead/vxor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vxor_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vxor_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vxor_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vxor_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vxor_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vxor_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vxor_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vxor_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vxor_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vxor_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vxor_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vxor_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vxor_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vxor_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vxor_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vxor_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vxor_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vxor_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vxor_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vxor_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vxor_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vxor_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vxor_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vxor_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vxor_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vxor_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vxor_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] 
@@ -276,7 +277,7 @@ vint64m2_t test_vxor_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vxor_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vxor_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vxor_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vxor_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vxor_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vxor_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vxor_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vxor_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vxor_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vxor_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vxor_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vxor_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vxor_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vxor_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vxor_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vxor_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vxor_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vxor_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vxor_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vxor_vv_u16m8(vuint16m8_t op1, vuint16m8_t 
op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vxor_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vxor_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vxor_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vxor_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vxor_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vxor_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vxor_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vxor_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vxor_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vxor_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vxor_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vxor_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vxor_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vxor_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vxor_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vxor_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vand.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vand.c index 417cadfc9b6f61..bc40564ea69327 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vand.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vand.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vand_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vand_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vand.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vand_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vand_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vand_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vand_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vand_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vand_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vand_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 
noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vand_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vand_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vand_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vand_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vand_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vand_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vand_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: 
define dso_local @test_vand_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vand_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vand_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vand_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vand_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vand_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vand_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) 
// CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vand_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vand_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vand_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vand_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vand_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vand_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vand_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vand_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vand_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vand_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vand_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vand_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vand_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vand_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vand_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vand_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vand_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vand_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vand_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vand_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vand_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, 
size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vand_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vand_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vand_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vand_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vand_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vand_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vand_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vand_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vand_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vand_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vand_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vand_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vand_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vand_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vand_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vand_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vand_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vand_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vand_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vand_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, 
size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vand_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vand.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vnot.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vnot.c index cf03bc0f5accf1..eadaa418a79a81 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vnot.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vnot.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vnot_v_i8m1(vint8m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_vnot_v_i8m2(vint8m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_vnot_v_i8m4(vint8m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_vnot_v_i8m8(vint8m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vnot_v_i16m1(vint16m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_vnot_v_i16m2(vint16m2_t op1, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vnot_v_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_vnot_v_i16m4(vint16m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_vnot_v_i16m8(vint16m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vnot_v_i32m1(vint32m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_vnot_v_i32m2(vint32m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_vnot_v_i32m4(vint32m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_vnot_v_i32m8(vint32m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vnot_v_i64m1(vint64m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vxor.nxv2i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_vnot_v_i64m2(vint64m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_vnot_v_i64m4(vint64m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m8_t test_vnot_v_i64m8(vint64m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vnot_v_u8m1(vuint8m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m2_t test_vnot_v_u8m2(vuint8m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m4_t test_vnot_v_u8m4(vuint8m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.i8.i64( poison, [[OP1]], i8 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m8_t test_vnot_v_u8m8(vuint8m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vnot_v_u16m1(vuint16m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] 
{ +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m2_t test_vnot_v_u16m2(vuint16m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m4_t test_vnot_v_u16m4(vuint16m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.i16.i64( poison, [[OP1]], i16 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m8_t test_vnot_v_u16m8(vuint16m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vnot_v_u32m1(vuint32m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m2_t test_vnot_v_u32m2(vuint32m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m4_t test_vnot_v_u32m4(vuint32m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.i32.i64( poison, [[OP1]], i32 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m8_t test_vnot_v_u32m8(vuint32m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t 
test_vnot_v_u64m1(vuint64m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_vnot_v_u64m2(vuint64m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m4_t test_vnot_v_u64m4(vuint64m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnot_v_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.i64.i64( poison, [[OP1]], i64 -1, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vor.c index e2733ec2cd6d37..702707c9894b22 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vor_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vor_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vor_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vor.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vor_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vor_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vor_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vor_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vor_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vor_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vor_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vor_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vor_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vor_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vor_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vor_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vor_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vor_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i32m1 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vor_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vor_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vor_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vor_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vor_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vor_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-246,7 +247,7 @@ vint32m8_t test_vor_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vor_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vor_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vor_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vor_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vor_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vor_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vor.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vor_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vor_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vor_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vor_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vor_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vor_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vor_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 
noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vor_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vor_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vor_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vor_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vor_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vor_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vor_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vor_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vor_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vor_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vor_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vor_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vor_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vor_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vor_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vor_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vor_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vor_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vor_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vor_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vor_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vor_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vor_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vor_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vor_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vor_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vor_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vor_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vor.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vxor.c 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vxor.c index 31d922e437415e..1858cadcec1496 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vxor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-bitwide-logical/wrappers/vxor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vxor_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vxor_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vxor_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vxor_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vxor_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vxor_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vxor_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vxor_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vxor_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vxor_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vxor_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vxor_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vxor_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], 
i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vxor_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vxor_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vxor_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vxor_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vxor_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vxor_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] 
@@ -206,7 +207,7 @@ vint32m2_t test_vxor_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vxor_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vxor_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vxor_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vxor_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vxor_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vxor_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vxor_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vxor_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vxor_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vxor_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vxor_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vxor_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vxor_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vxor_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vxor_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vxor_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vxor_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vxor_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vxor_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vxor_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vxor_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vxor_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vxor_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vxor_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vxor_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vxor_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.nxv32i16.i64( 
poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vxor_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vxor_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vxor_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vxor_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vxor_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vxor_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vxor_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vxor_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vxor_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vxor_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vxor_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vxor_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vxor_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vxor_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { 
} // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vxor_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vxor_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vxor_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vxor_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vxor.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmseq.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmseq.c index b6fdfd8a16384c..21b3f381d0b0be 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmseq.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmseq.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmseq_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmseq_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmseq_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmseq_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmseq_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmseq_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmseq_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmseq_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmseq_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m1_b16 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmseq_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmseq_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmseq_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmseq_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmseq_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmseq_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmseq_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmseq_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmseq_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmseq_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmseq_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmseq_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmseq_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmseq_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmseq_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmseq_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmseq_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmseq_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmseq_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmseq_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmseq_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmseq_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmseq_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmseq_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmseq_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmseq_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmseq_vx_u8m2_b4(vuint8m2_t op1, uint8_t 
op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmseq_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmseq_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmseq_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmseq_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmseq_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmseq_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.nxv8i16.i64( 
[[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmseq_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmseq_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmseq_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmseq_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmseq_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmseq_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmseq_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmseq_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmseq_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmseq_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmseq_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmseq_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmseq_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmseq_vx_u32m8_b4(vuint32m8_t op1, 
uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmseq_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmseq_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmseq_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmseq_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmseq_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmseq_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.nxv8i64.i64( [[OP1]], 
[[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmseq_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vbool8_t test_vmseq_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vbool8_t test_vmseq_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vbool8_t test_vmseq_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vbool4_t test_vmseq_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -686,7 +687,7 @@ vbool4_t test_vmseq_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vbool2_t 
test_vmseq_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vbool2_t test_vmseq_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vbool1_t test_vmseq_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vbool1_t test_vmseq_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vbool16_t test_vmseq_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vbool16_t test_vmseq_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 +757,7 @@ vbool8_t 
test_vmseq_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vbool8_t test_vmseq_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vbool4_t test_vmseq_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vbool4_t test_vmseq_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vbool2_t test_vmseq_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vbool2_t test_vmseq_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vbool32_t 
test_vmseq_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vbool32_t test_vmseq_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vbool16_t test_vmseq_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ vbool16_t test_vmseq_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vbool8_t test_vmseq_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -866,7 +867,7 @@ vbool8_t test_vmseq_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -876,7 +877,7 @@ vbool4_t 
test_vmseq_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -886,7 +887,7 @@ vbool4_t test_vmseq_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -896,7 +897,7 @@ vbool64_t test_vmseq_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -906,7 +907,7 @@ vbool64_t test_vmseq_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -916,7 +917,7 @@ vbool32_t test_vmseq_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -926,7 +927,7 @@ vbool32_t test_vmseq_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -936,7 +937,7 @@ vbool16_t test_vmseq_vv_i64m4_b16_m(vbool16_t mask, 
vint64m4_t op1, vint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -946,7 +947,7 @@ vbool16_t test_vmseq_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -956,7 +957,7 @@ vbool8_t test_vmseq_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -966,7 +967,7 @@ vbool8_t test_vmseq_vx_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -976,7 +977,7 @@ vbool8_t test_vmseq_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -986,7 +987,7 @@ vbool8_t test_vmseq_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -996,7 +997,7 @@ vbool4_t test_vmseq_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define 
dso_local @test_vmseq_vx_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1006,7 +1007,7 @@ vbool4_t test_vmseq_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1016,7 +1017,7 @@ vbool2_t test_vmseq_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1026,7 +1027,7 @@ vbool2_t test_vmseq_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1036,7 +1037,7 @@ vbool1_t test_vmseq_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1046,7 +1047,7 @@ vbool1_t test_vmseq_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1056,7 +1057,7 @@ vbool16_t test_vmseq_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local 
@test_vmseq_vx_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1066,7 +1067,7 @@ vbool16_t test_vmseq_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1076,7 +1077,7 @@ vbool8_t test_vmseq_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1086,7 +1087,7 @@ vbool8_t test_vmseq_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1096,7 +1097,7 @@ vbool4_t test_vmseq_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1106,7 +1107,7 @@ vbool4_t test_vmseq_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1116,7 +1117,7 @@ vbool2_t test_vmseq_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op } // CHECK-RV64-LABEL: define dso_local 
@test_vmseq_vx_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1126,7 +1127,7 @@ vbool2_t test_vmseq_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1136,7 +1137,7 @@ vbool32_t test_vmseq_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1146,7 +1147,7 @@ vbool32_t test_vmseq_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1156,7 +1157,7 @@ vbool16_t test_vmseq_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1166,7 +1167,7 @@ vbool16_t test_vmseq_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1176,7 +1177,7 @@ vbool8_t test_vmseq_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op } // CHECK-RV64-LABEL: define dso_local 
@test_vmseq_vx_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1186,7 +1187,7 @@ vbool8_t test_vmseq_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1196,7 +1197,7 @@ vbool4_t test_vmseq_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1206,7 +1207,7 @@ vbool4_t test_vmseq_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1216,7 +1217,7 @@ vbool64_t test_vmseq_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1226,7 +1227,7 @@ vbool64_t test_vmseq_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1236,7 +1237,7 @@ vbool32_t test_vmseq_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t } // CHECK-RV64-LABEL: define dso_local 
@test_vmseq_vx_u64m2_b32_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -1246,7 +1247,7 @@ vbool32_t test_vmseq_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t op
}
// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m4_b16_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -1256,7 +1257,7 @@ vbool16_t test_vmseq_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t
}
// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m4_b16_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -1266,7 +1267,7 @@ vbool16_t test_vmseq_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t op
}
// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m8_b8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -1276,7 +1277,7 @@ vbool8_t test_vmseq_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op
}
// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m8_b8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsge.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsge.c
index 8fcaeb15217de6..9564e574b0091b 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsge.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsge.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vbool8_t
test_vmsge_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsge_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsge_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsge_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmsge_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsge_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsge_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsge.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsge_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsge_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsge_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsge_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsge_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsge_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsge_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsge_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsge_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsge_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsge_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsge_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsge_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsge_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsge_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsge_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsge_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsge_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsge_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsge_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i64.i64.i64( 
[[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsge_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsge_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsge_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsge_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsge_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsge_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsge_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsge_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsge_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsge_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsge_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsge_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsge_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsge_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsge_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsge_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsge_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsge_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsge_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsge_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsge_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsge_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsge_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsge_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsge_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsge_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsge_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsge_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsge_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsge_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsge_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsge_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsge_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsge_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsge_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsge_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgeu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgeu.c index b45905416e8441..a79c62d201286b 100644 --- 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgeu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgeu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmsgeu_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsgeu_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsgeu_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsgeu_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmsgeu_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsgeu_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsgeu.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsgeu_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsgeu_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsgeu_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsgeu_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsgeu_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsgeu_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsgeu_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsgeu_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsgeu_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsgeu_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsgeu_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsgeu_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsgeu_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ 
vbool16_t test_vmsgeu_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsgeu_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsgeu_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsgeu_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsgeu_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsgeu_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsgeu_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsgeu_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsgeu_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsgeu_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsgeu_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsgeu_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsgeu_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsgeu_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m1_b8_m -// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsgeu_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsgeu_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsgeu_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsgeu_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsgeu_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsgeu_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m8_b1_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsgeu_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsgeu_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsgeu_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsgeu_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsgeu_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsgeu_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m4_b4_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsgeu_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsgeu_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsgeu_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsgeu_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsgeu_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsgeu_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m2_b16_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsgeu_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsgeu_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsgeu_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsgeu_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsgeu_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsgeu_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m1_b64_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsgeu_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsgeu_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsgeu_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsgeu_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsgeu_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsgeu_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgt.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgt.c index 7f86c067549734..2b7b3011e1b29b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgt.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgt.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmsgt_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsgt_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsgt_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsgt_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmsgt_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsgt_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsgt_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsgt_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsgt_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsgt_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsgt_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsgt_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsgt_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsgt_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsgt_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsgt_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsgt_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsgt_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsgt_vv_i32m2_b16(vint32m2_t op1, vint32m2_t 
op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsgt_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsgt_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsgt_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsgt_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsgt_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsgt_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsgt.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsgt_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsgt_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsgt_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsgt_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsgt_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsgt_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsgt_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsgt_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsgt_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsgt_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsgt_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsgt_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsgt_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsgt_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsgt_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsgt_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsgt_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsgt_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsgt_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsgt_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsgt_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsgt_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsgt_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsgt_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsgt_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] 
{ // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsgt_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsgt_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsgt_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsgt_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsgt_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsgt_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsgt_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsgt_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsgt_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsgt_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsgt_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsgt_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsgt.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsgt_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgtu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgtu.c index 4ada9cf864dbba..38502ebe26041f 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgtu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgtu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmsgtu_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsgtu_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsgtu_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsgtu_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-56,7 +57,7 @@ vbool2_t test_vmsgtu_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsgtu_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsgtu_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsgtu_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsgtu_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsgtu_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsgtu_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsgtu_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsgtu_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsgtu_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsgtu_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsgtu_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsgtu_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsgtu_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vmsgtu_vv_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsgtu_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsgtu_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsgtu_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsgtu_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsgtu_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsgtu_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], 
i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsgtu_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsgtu_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsgtu_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsgtu_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsgtu_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsgtu_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsgtu_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsgtu_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsgtu_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsgtu_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsgtu_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsgtu_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsgtu_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsgtu_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsgtu_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsgtu_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsgtu_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsgtu_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsgtu_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsgtu_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsgtu_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsgtu_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsgtu_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsgtu_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsgtu_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsgtu_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsgtu_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsgtu_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsgtu_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsgtu_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsgtu_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsgtu_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsgtu_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsgtu_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsgtu_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsgtu_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsgtu_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsgtu_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsgtu_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmslt.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmslt.c index 86029adda9de67..a66b063b196a20 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmslt.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmslt.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmslt_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmslt_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmslt_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i8.i8.i64( 
[[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmslt_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmslt_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmslt_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmslt_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmslt_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmslt_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmslt_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
// CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmslt_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmslt_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmslt_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmslt_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmslt_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmslt_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmslt_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m1_b32 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmslt_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmslt_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmslt_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmslt_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmslt_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmslt_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmslt_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmslt_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmslt_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmslt_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmslt_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmslt_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmslt_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmslt_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmslt_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmslt_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmslt_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmslt_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmslt_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmslt_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmslt_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmslt_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmslt_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmslt_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmslt_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ 
vbool8_t test_vmslt_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmslt_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmslt_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmslt_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmslt_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmslt_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t 
test_vmslt_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmslt_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmslt_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmslt_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmslt_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmslt_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t 
test_vmslt_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmslt_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmslt_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmslt_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmslt_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmslt_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmslt_vv_i64m4_b16_m(vbool16_t mask, 
vint64m4_t op1, vint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmslt_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmslt_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsltu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsltu.c index 4b71242544570a..a338a7e127fba0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsltu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsltu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmsltu_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsltu_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsltu_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsltu_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmsltu_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsltu_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsltu_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsltu_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsltu_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsltu.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsltu_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsltu_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsltu_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsltu_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsltu_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsltu_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsltu_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsltu_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsltu_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsltu_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsltu_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsltu_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsltu_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t 
test_vmsltu_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsltu_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsltu_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsltu_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsltu_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsltu_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsltu_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsltu_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsltu_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsltu_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsltu_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsltu_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsltu_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] 
@@ -366,7 +367,7 @@ vbool4_t test_vmsltu_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsltu_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsltu_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsltu_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsltu_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsltu_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 
@@ vbool16_t test_vmsltu_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsltu_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsltu_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsltu_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsltu_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsltu_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ 
vbool2_t test_vmsltu_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsltu_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsltu_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsltu_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsltu_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsltu_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ 
vbool8_t test_vmsltu_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsltu_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsltu_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsltu_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsltu_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsltu_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t 
test_vmsltu_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsltu_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsltu_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsltu_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsne.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsne.c index 7831154dd082ce..586d319141a9e5 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsne.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsne.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmsne_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsne_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsne_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsne_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmsne_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsne_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsne_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsne_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsne.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsne_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsne_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsne_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsne_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsne_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsne_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsne_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsne_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsne_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsne_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsne_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsne_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsne_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t 
test_vmsne_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsne_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsne_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsne_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsne_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsne_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsne_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsne.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsne_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsne_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsne_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsne_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsne_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsne_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsne_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsne_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsne_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsne_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsne_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsne_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsne_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsne_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_vmsne_vv_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsne_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsne_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsne_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsne_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsne_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsne_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) 
// CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsne_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsne_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsne_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsne_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsne_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsne_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsne_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], 
i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsne_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsne_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsne_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsne_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsne_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsne_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsne_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vmsne_vv_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsne_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vbool8_t test_vmsne_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vbool8_t test_vmsne_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vbool8_t test_vmsne_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vbool4_t test_vmsne_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -686,7 +687,7 @@ vbool4_t test_vmsne_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vbool2_t test_vmsne_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vbool2_t test_vmsne_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vbool1_t test_vmsne_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vbool1_t test_vmsne_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vbool16_t test_vmsne_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vbool16_t test_vmsne_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 +757,7 @@ vbool8_t test_vmsne_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vbool8_t test_vmsne_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vbool4_t test_vmsne_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vbool4_t test_vmsne_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vbool2_t test_vmsne_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vbool2_t test_vmsne_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vbool32_t test_vmsne_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vbool32_t test_vmsne_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vbool16_t test_vmsne_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ vbool16_t test_vmsne_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vbool8_t test_vmsne_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -866,7 +867,7 @@ vbool8_t test_vmsne_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -876,7 +877,7 @@ vbool4_t test_vmsne_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -886,7 +887,7 @@ vbool4_t test_vmsne_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -896,7 +897,7 @@ vbool64_t test_vmsne_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -906,7 +907,7 @@ vbool64_t test_vmsne_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -916,7 +917,7 @@ vbool32_t test_vmsne_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -926,7 +927,7 @@ vbool32_t test_vmsne_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -936,7 +937,7 @@ vbool16_t test_vmsne_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -946,7 +947,7 @@ vbool16_t test_vmsne_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -956,7 +957,7 @@ vbool8_t test_vmsne_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -966,7 +967,7 @@ vbool8_t test_vmsne_vx_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -976,7 +977,7 @@ vbool8_t test_vmsne_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -986,7 +987,7 @@ vbool8_t test_vmsne_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsne.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -996,7 +997,7 @@ vbool4_t test_vmsne_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1006,7 +1007,7 @@ vbool4_t test_vmsne_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1016,7 +1017,7 @@ vbool2_t test_vmsne_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1026,7 +1027,7 @@ vbool2_t test_vmsne_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1036,7 +1037,7 @@ vbool1_t test_vmsne_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1046,7 +1047,7 @@ vbool1_t test_vmsne_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsne.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1056,7 +1057,7 @@ vbool16_t test_vmsne_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1066,7 +1067,7 @@ vbool16_t test_vmsne_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1076,7 +1077,7 @@ vbool8_t test_vmsne_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1086,7 +1087,7 @@ vbool8_t test_vmsne_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1096,7 +1097,7 @@ vbool4_t test_vmsne_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1106,7 +1107,7 @@ vbool4_t test_vmsne_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsne.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1116,7 +1117,7 @@ vbool2_t test_vmsne_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1126,7 +1127,7 @@ vbool2_t test_vmsne_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1136,7 +1137,7 @@ vbool32_t test_vmsne_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1146,7 +1147,7 @@ vbool32_t test_vmsne_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1156,7 +1157,7 @@ vbool16_t test_vmsne_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1166,7 +1167,7 @@ vbool16_t test_vmsne_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsne.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1176,7 +1177,7 @@ vbool8_t test_vmsne_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1186,7 +1187,7 @@ vbool8_t test_vmsne_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1196,7 +1197,7 @@ vbool4_t test_vmsne_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1206,7 +1207,7 @@ vbool4_t test_vmsne_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1216,7 +1217,7 @@ vbool64_t test_vmsne_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1226,7 +1227,7 @@ vbool64_t test_vmsne_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsne.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1236,7 +1237,7 @@ vbool32_t test_vmsne_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1246,7 +1247,7 @@ vbool32_t test_vmsne_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1256,7 +1257,7 @@ vbool16_t test_vmsne_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1266,7 +1267,7 @@ vbool16_t test_vmsne_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1276,7 +1277,7 @@ vbool8_t test_vmsne_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmseq.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmseq.c index d29ea5c9b0c551..929137f4b0d7c4 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmseq.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmseq.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have 
been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmseq_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmseq_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmseq_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmseq_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmseq_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmseq_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmseq_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], 
i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmseq_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmseq_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmseq_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmseq_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmseq_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmseq_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 
@@ vbool4_t test_vmseq_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmseq_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmseq_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmseq_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmseq_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmseq_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmseq_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmseq_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmseq_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmseq_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmseq_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmseq_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmseq_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmseq_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmseq_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmseq_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmseq_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmseq_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmseq_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmseq_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmseq_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { } 
// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmseq_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmseq_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmseq_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmseq_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmseq_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmseq_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmseq_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmseq_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmseq_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmseq_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmseq_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmseq_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmseq_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 
noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmseq_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmseq_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmseq_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmseq_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmseq_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmseq_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmseq_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmseq_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmseq_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmseq_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmseq_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmseq_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmseq_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmseq_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmseq_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmseq_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vbool8_t test_vmseq_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vbool8_t test_vmseq_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vbool8_t test_vmseq_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vbool4_t test_vmseq_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m2_b4_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -686,7 +687,7 @@ vbool4_t test_vmseq_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vbool2_t test_vmseq_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vbool2_t test_vmseq_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vbool1_t test_vmseq_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vbool1_t test_vmseq_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vbool16_t test_vmseq_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 
noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vbool16_t test_vmseq_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 +757,7 @@ vbool8_t test_vmseq_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vbool8_t test_vmseq_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vbool4_t test_vmseq_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vbool4_t test_vmseq_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vbool2_t test_vmseq_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vbool2_t test_vmseq_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vbool32_t test_vmseq_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vbool32_t test_vmseq_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vbool16_t test_vmseq_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ vbool16_t test_vmseq_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vbool8_t test_vmseq_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -866,7 +867,7 @@ vbool8_t test_vmseq_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -876,7 +877,7 @@ vbool4_t test_vmseq_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -886,7 +887,7 @@ vbool4_t test_vmseq_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -896,7 +897,7 @@ vbool64_t test_vmseq_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -906,7 +907,7 @@ vbool64_t test_vmseq_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -916,7 +917,7 @@ vbool32_t test_vmseq_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -926,7 +927,7 @@ vbool32_t test_vmseq_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -936,7 +937,7 @@ vbool16_t test_vmseq_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -946,7 +947,7 @@ vbool16_t test_vmseq_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -956,7 +957,7 @@ vbool8_t test_vmseq_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -966,7 +967,7 @@ vbool8_t test_vmseq_vx_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -976,7 +977,7 @@ vbool8_t test_vmseq_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -986,7 +987,7 @@ vbool8_t test_vmseq_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -996,7 +997,7 @@ vbool4_t test_vmseq_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1006,7 +1007,7 @@ vbool4_t test_vmseq_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1016,7 +1017,7 @@ vbool2_t test_vmseq_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1026,7 +1027,7 @@ vbool2_t test_vmseq_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1036,7 +1037,7 @@ vbool1_t test_vmseq_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1046,7 +1047,7 @@ vbool1_t test_vmseq_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1056,7 +1057,7 @@ vbool16_t test_vmseq_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1066,7 +1067,7 @@ vbool16_t test_vmseq_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1076,7 +1077,7 @@ vbool8_t test_vmseq_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1086,7 +1087,7 @@ vbool8_t test_vmseq_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1096,7 +1097,7 @@ vbool4_t test_vmseq_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1106,7 +1107,7 @@ vbool4_t test_vmseq_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1116,7 +1117,7 @@ vbool2_t test_vmseq_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1126,7 +1127,7 @@ vbool2_t test_vmseq_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1136,7 +1137,7 @@ vbool32_t test_vmseq_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1146,7 +1147,7 @@ vbool32_t test_vmseq_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1156,7 +1157,7 @@ vbool16_t test_vmseq_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1166,7 +1167,7 @@ vbool16_t test_vmseq_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1176,7 +1177,7 @@ vbool8_t test_vmseq_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1186,7 +1187,7 @@ vbool8_t test_vmseq_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1196,7 +1197,7 @@ vbool4_t test_vmseq_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1206,7 +1207,7 @@ vbool4_t test_vmseq_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1216,7 +1217,7 @@ vbool64_t test_vmseq_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1226,7 +1227,7 @@ vbool64_t test_vmseq_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1236,7 +1237,7 @@ vbool32_t test_vmseq_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1246,7 +1247,7 @@ vbool32_t test_vmseq_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1256,7 +1257,7 @@ vbool16_t test_vmseq_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1266,7 +1267,7 @@ vbool16_t test_vmseq_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1276,7 +1277,7 @@ vbool8_t test_vmseq_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsge.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsge.c index 1011d702c156e7..183fb7a7c74ea0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsge.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsge.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmsge_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsge_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsge_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsge_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmsge_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ 
vbool2_t test_vmsge_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsge_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsge_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsge_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsge_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsge_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsge_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsge.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsge_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsge_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsge_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsge_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsge_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsge_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsge_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsge_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsge_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsge_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsge_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsge_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsge_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsge_vx_i64m1_b64(vint64m1_t 
op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsge_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsge_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsge_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsge_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsge_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsge_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsge.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsge_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsge_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsge_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsge_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsge_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsge_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsge.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsge_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsge_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsge_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsge_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsge_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsge_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsge.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsge_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsge_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsge_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsge_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsge_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsge_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsge.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsge_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsge_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsge_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsge_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsge_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsge_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsge.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsge_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsge_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsge_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsge_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsge_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsge_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], 
[[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsge_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgeu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgeu.c index 7828dcc3cd9e28..8e09795f7d2764 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgeu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgeu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmsgeu_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsgeu_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsgeu_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsgeu_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t 
test_vmsgeu_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsgeu_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsgeu_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsgeu_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsgeu_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsgeu_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsgeu_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsgeu_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsgeu_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsgeu_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsgeu_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsgeu_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsgeu_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsgeu_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vmsgeu_vv_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsgeu_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsgeu_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsgeu_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsgeu_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsgeu_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsgeu_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], 
i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsgeu_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsgeu_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsgeu_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsgeu_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsgeu_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsgeu_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsgeu_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsgeu_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsgeu_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsgeu_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsgeu_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsgeu_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsgeu_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsgeu_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsgeu_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsgeu_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsgeu_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsgeu_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsgeu_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsgeu_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsgeu_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsgeu_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsgeu_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsgeu_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsgeu_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsgeu_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsgeu_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsgeu_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsgeu_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsgeu_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsgeu_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsgeu_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsgeu_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsgeu_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsgeu_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsgeu_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsgeu_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -626,7 +627,7 @@ vbool16_t test_vmsgeu_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t o
}
// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m8_b8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -636,7 +637,7 @@ vbool8_t test_vmsgeu_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o
}
// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m8_b8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgt.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgt.c
index ec07f4041dbb00..ce4b06a31a868d 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgt.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgt.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vbool8_t test_vmsgt_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m1_b8
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7 +27,7 @@ vbool8_t test_vmsgt_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m2_b4
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -36,7 +37,7 @@ vbool4_t test_vmsgt_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m2_b4
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call
@llvm.riscv.th.vmsgt.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsgt_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmsgt_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsgt_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsgt_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsgt_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsgt_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsgt_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsgt_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsgt_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsgt_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsgt_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsgt_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsgt_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsgt_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vmsgt_vx_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsgt_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsgt_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsgt_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsgt_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsgt_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsgt_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i32.i32.i64( [[OP1]], 
i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsgt_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsgt_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsgt_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsgt_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsgt_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsgt_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsgt_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsgt_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsgt_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsgt_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsgt_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsgt_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsgt_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], 
[[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsgt_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsgt_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsgt_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsgt_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsgt_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsgt_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsgt_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsgt_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsgt_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsgt_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsgt_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsgt_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsgt_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsgt_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsgt_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsgt_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsgt_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsgt_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsgt_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsgt_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsgt_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsgt_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsgt_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsgt_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 
@@ vbool16_t test_vmsgt_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t o
}
// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m4_b16_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -626,7 +627,7 @@ vbool16_t test_vmsgt_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2,
}
// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m8_b8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -636,7 +637,7 @@ vbool8_t test_vmsgt_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2,
}
// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m8_b8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgtu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgtu.c
index 80d0302ccf2131..61d027f7b3ea11 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgtu.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsgtu.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vbool8_t test_vmsgtu_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m1_b8
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7 +27,7 @@ vbool8_t test_vmsgtu_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m2_b4
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -36,7 +37,7 @@ vbool4_t
test_vmsgtu_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsgtu_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmsgtu_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsgtu_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsgtu_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsgtu_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsgtu_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsgtu_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsgtu_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsgtu_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsgtu_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsgtu_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsgtu_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsgtu_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m1_b32 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsgtu_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsgtu_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsgtu_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsgtu_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsgtu_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsgtu_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-236,7 +237,7 @@ vbool4_t test_vmsgtu_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsgtu_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsgtu_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsgtu_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsgtu_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsgtu_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsgtu_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] 
{ // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsgtu_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsgtu_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsgtu_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsgtu_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsgtu_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsgtu_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsgtu_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsgtu_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsgtu_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsgtu_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsgtu_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsgtu_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: 
ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsgtu_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsgtu_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsgtu_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsgtu_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsgtu_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsgtu_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: 
ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsgtu_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsgtu_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsgtu_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsgtu_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsgtu_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsgtu_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsgtu_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsgtu_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsgtu_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsgtu_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsgtu_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsgtu_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ 
vbool32_t test_vmsgtu_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsgtu_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_ } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsgtu_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsgtu_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmslt.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmslt.c index b94c5d40070f6c..0e814c3b70321e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmslt.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmslt.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmslt_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmslt_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmslt_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmslt_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmslt_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmslt_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmslt_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmslt_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmslt_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmslt_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmslt_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmslt_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmslt_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmslt_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmslt_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmslt_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmslt_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmslt_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmslt_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmslt_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmslt_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t 
test_vmslt_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmslt_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmslt_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmslt_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmslt_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmslt_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmslt_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmslt.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmslt_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmslt_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmslt_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmslt_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmslt_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmslt_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmslt_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define 
dso_local @test_vmslt_vx_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmslt_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmslt_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmslt_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmslt_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmslt_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmslt_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m1_b16_m 
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmslt_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmslt_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmslt_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmslt_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmslt_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmslt_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m8_b2_m -// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmslt_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmslt_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmslt_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmslt_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmslt_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmslt_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m4_b8_m -// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmslt_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmslt_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmslt_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmslt_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmslt_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmslt_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m2_b32_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmslt_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmslt_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmslt_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmslt_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsltu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsltu.c index dd5f7be2f30978..538f9b447913d0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsltu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsltu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmsltu_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } 
// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsltu_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsltu_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsltu_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmsltu_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsltu_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsltu_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv64i8.i8.i64( 
[[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsltu_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsltu_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsltu_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsltu_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsltu_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsltu_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsltu_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: 
( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsltu_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsltu_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsltu_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsltu_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsltu_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsltu_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsltu_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, 
size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsltu_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsltu_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsltu_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsltu_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsltu_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsltu_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsltu.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsltu_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsltu_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsltu_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsltu_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsltu_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsltu_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsltu_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local 
@test_vmsltu_vv_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsltu_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsltu_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsltu_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsltu_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsltu_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsltu_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local 
@test_vmsltu_vv_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsltu_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsltu_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsltu_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsltu_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsltu_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsltu_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local 
@test_vmsltu_vv_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsltu_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsltu_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsltu_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsltu_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsltu_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsltu_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t o } // CHECK-RV64-LABEL: define dso_local 
@test_vmsltu_vv_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsltu_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsltu_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsltu_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsltu_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsltu_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsltu_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local 
@test_vmsltu_vv_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsltu_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsltu_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsltu_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_ } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsltu_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsltu_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsne.c 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsne.c index 0d204943841d71..f3536bb7e653e6 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsne.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/wrappers/vmsne.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool8_t test_vmsne_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool8_t test_vmsne_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsne_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool4_t test_vmsne_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool2_t test_vmsne_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsne_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool1_t test_vmsne_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vbool1_t test_vmsne_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vbool16_t test_vmsne_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vbool16_t test_vmsne_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vbool8_t test_vmsne_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vbool8_t test_vmsne_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vbool4_t test_vmsne_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m4_b4 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vbool4_t test_vmsne_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vbool2_t test_vmsne_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vbool2_t test_vmsne_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vbool32_t test_vmsne_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vbool32_t test_vmsne_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vbool16_t test_vmsne_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: 
ret [[TMP0]] @@ -206,7 +207,7 @@ vbool16_t test_vmsne_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vbool8_t test_vmsne_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vbool8_t test_vmsne_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vbool4_t test_vmsne_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vbool4_t test_vmsne_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vbool64_t test_vmsne_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vbool64_t test_vmsne_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vbool32_t test_vmsne_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vbool32_t test_vmsne_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vbool16_t test_vmsne_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vbool16_t test_vmsne_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vbool8_t test_vmsne_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vbool8_t test_vmsne_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vbool8_t test_vmsne_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m1_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vbool8_t test_vmsne_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vbool4_t test_vmsne_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m2_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vbool4_t test_vmsne_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vbool2_t test_vmsne_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m4_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vbool2_t test_vmsne_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vbool1_t test_vmsne_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m8_b1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vbool1_t test_vmsne_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vbool16_t test_vmsne_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m1_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vbool16_t test_vmsne_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vbool8_t test_vmsne_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m2_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vbool8_t test_vmsne_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vbool4_t test_vmsne_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m4_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vbool4_t test_vmsne_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.nxv32i16.i64( 
[[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vbool2_t test_vmsne_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m8_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vbool2_t test_vmsne_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vbool32_t test_vmsne_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m1_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vbool32_t test_vmsne_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vbool16_t test_vmsne_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m2_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vbool16_t test_vmsne_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vbool8_t test_vmsne_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m4_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vbool8_t test_vmsne_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vbool4_t test_vmsne_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m8_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vbool4_t test_vmsne_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vbool64_t test_vmsne_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m1_b64 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vbool64_t test_vmsne_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vbool32_t test_vmsne_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m2_b32 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vbool32_t test_vmsne_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vbool16_t test_vmsne_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m4_b16 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vbool16_t test_vmsne_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vbool8_t test_vmsne_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m8_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vbool8_t test_vmsne_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vbool8_t test_vmsne_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vbool8_t test_vmsne_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vbool4_t test_vmsne_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -686,7 +687,7 @@ vbool4_t test_vmsne_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vbool2_t test_vmsne_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vbool2_t test_vmsne_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vbool1_t test_vmsne_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vbool1_t test_vmsne_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vbool16_t test_vmsne_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vbool16_t test_vmsne_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 +757,7 @@ vbool8_t test_vmsne_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vbool8_t test_vmsne_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vbool4_t test_vmsne_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vbool4_t test_vmsne_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vbool2_t test_vmsne_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vbool2_t test_vmsne_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vbool32_t test_vmsne_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vbool32_t test_vmsne_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vbool16_t test_vmsne_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ vbool16_t test_vmsne_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] 
= call @llvm.riscv.th.vmsne.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vbool8_t test_vmsne_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -866,7 +867,7 @@ vbool8_t test_vmsne_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -876,7 +877,7 @@ vbool4_t test_vmsne_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -886,7 +887,7 @@ vbool4_t test_vmsne_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -896,7 +897,7 @@ vbool64_t test_vmsne_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -906,7 +907,7 @@ vbool64_t test_vmsne_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsne.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -916,7 +917,7 @@ vbool32_t test_vmsne_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -926,7 +927,7 @@ vbool32_t test_vmsne_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -936,7 +937,7 @@ vbool16_t test_vmsne_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -946,7 +947,7 @@ vbool16_t test_vmsne_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -956,7 +957,7 @@ vbool8_t test_vmsne_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -966,7 +967,7 @@ vbool8_t test_vmsne_vx_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], 
[[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -976,7 +977,7 @@ vbool8_t test_vmsne_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m1_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -986,7 +987,7 @@ vbool8_t test_vmsne_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -996,7 +997,7 @@ vbool4_t test_vmsne_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m2_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1006,7 +1007,7 @@ vbool4_t test_vmsne_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1016,7 +1017,7 @@ vbool2_t test_vmsne_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m4_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1026,7 +1027,7 @@ vbool2_t test_vmsne_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -1036,7 +1037,7 @@ vbool1_t test_vmsne_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m8_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1046,7 +1047,7 @@ vbool1_t test_vmsne_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1056,7 +1057,7 @@ vbool16_t test_vmsne_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m1_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1066,7 +1067,7 @@ vbool16_t test_vmsne_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1076,7 +1077,7 @@ vbool8_t test_vmsne_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m2_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1086,7 +1087,7 @@ vbool8_t test_vmsne_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -1096,7 +1097,7 @@ vbool4_t test_vmsne_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m4_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1106,7 +1107,7 @@ vbool4_t test_vmsne_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1116,7 +1117,7 @@ vbool2_t test_vmsne_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m8_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1126,7 +1127,7 @@ vbool2_t test_vmsne_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1136,7 +1137,7 @@ vbool32_t test_vmsne_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m1_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1146,7 +1147,7 @@ vbool32_t test_vmsne_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -1156,7 +1157,7 @@ vbool16_t test_vmsne_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m2_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1166,7 +1167,7 @@ vbool16_t test_vmsne_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1176,7 +1177,7 @@ vbool8_t test_vmsne_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m4_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1186,7 +1187,7 @@ vbool8_t test_vmsne_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1196,7 +1197,7 @@ vbool4_t test_vmsne_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m8_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1206,7 +1207,7 @@ vbool4_t test_vmsne_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -1216,7 +1217,7 @@ vbool64_t test_vmsne_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m1_b64_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1226,7 +1227,7 @@ vbool64_t test_vmsne_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1236,7 +1237,7 @@ vbool32_t test_vmsne_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m2_b32_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1246,7 +1247,7 @@ vbool32_t test_vmsne_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1256,7 +1257,7 @@ vbool16_t test_vmsne_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m4_b16_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1266,7 +1267,7 @@ vbool16_t test_vmsne_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1276,7 +1277,7 @@ 
vbool8_t test_vmsne_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m8_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-merge/thead/vmerge.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-merge/thead/vmerge.c index 8606beaff27dbc..8d8f4e3a54c4a0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-merge/thead/vmerge.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-merge/thead/vmerge.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vmerge_vvm_i8m1(vint8m1_t op1, vint8m1_t op2, vbool8_t mask, size } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vmerge_vxm_i8m1(vint8m1_t op1, int8_t op2, vbool8_t mask, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vmerge_vvm_i8m2(vint8m2_t op1, vint8m2_t op2, vbool4_t mask, size } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vmerge_vxm_i8m2(vint8m2_t op1, int8_t op2, vbool4_t mask, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) 
// CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vmerge_vvm_i8m4(vint8m4_t op1, vint8m4_t op2, vbool2_t mask, size } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vmerge_vxm_i8m4(vint8m4_t op1, int8_t op2, vbool2_t mask, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vmerge_vvm_i8m8(vint8m8_t op1, vint8m8_t op2, vbool1_t mask, size } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vmerge_vxm_i8m8(vint8m8_t op1, int8_t op2, vbool1_t mask, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vmerge_vvm_i16m1(vint16m1_t op1, vint16m1_t op2, vbool16_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vmerge_vxm_i16m1(vint16m1_t op1, int16_t op2, vbool16_t mask, si } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t 
test_vmerge_vvm_i16m2(vint16m2_t op1, vint16m2_t op2, vbool8_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vmerge_vxm_i16m2(vint16m2_t op1, int16_t op2, vbool8_t mask, siz } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vmerge_vvm_i16m4(vint16m4_t op1, vint16m4_t op2, vbool4_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vmerge_vxm_i16m4(vint16m4_t op1, int16_t op2, vbool4_t mask, siz } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vmerge_vvm_i16m8(vint16m8_t op1, vint16m8_t op2, vbool2_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vmerge_vxm_i16m8(vint16m8_t op1, int16_t op2, vbool2_t mask, siz } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vmerge_vvm_i32m1(vint32m1_t op1, vint32m1_t op2, 
vbool32_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vmerge_vxm_i32m1(vint32m1_t op1, int32_t op2, vbool32_t mask, si } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vmerge_vvm_i32m2(vint32m2_t op1, vint32m2_t op2, vbool16_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vmerge_vxm_i32m2(vint32m2_t op1, int32_t op2, vbool16_t mask, si } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vmerge_vvm_i32m4(vint32m4_t op1, vint32m4_t op2, vbool8_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vmerge_vxm_i32m4(vint32m4_t op1, int32_t op2, vbool8_t mask, siz } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vmerge_vvm_i32m8(vint32m8_t op1, vint32m8_t op2, vbool4_t mask, } // CHECK-RV64-LABEL: define dso_local 
@test_vmerge_vxm_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vmerge_vxm_i32m8(vint32m8_t op1, int32_t op2, vbool4_t mask, siz } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vmerge_vvm_i64m1(vint64m1_t op1, vint64m1_t op2, vbool64_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vmerge_vxm_i64m1(vint64m1_t op1, int64_t op2, vbool64_t mask, si } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vmerge_vvm_i64m2(vint64m2_t op1, vint64m2_t op2, vbool32_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vmerge_vxm_i64m2(vint64m2_t op1, int64_t op2, vbool32_t mask, si } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vmerge_vvm_i64m4(vint64m4_t op1, vint64m4_t op2, vbool16_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], 
[[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vmerge_vxm_i64m4(vint64m4_t op1, int64_t op2, vbool16_t mask, si } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vmerge_vvm_i64m8(vint64m8_t op1, vint64m8_t op2, vbool8_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vmerge_vxm_i64m8(vint64m8_t op1, int64_t op2, vbool8_t mask, siz } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vmerge_vvm_u8m1(vuint8m1_t op1, vuint8m1_t op2, vbool8_t mask, s } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vmerge_vxm_u8m1(vuint8m1_t op1, uint8_t op2, vbool8_t mask, size } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vmerge_vvm_u8m2(vuint8m2_t op1, vuint8m2_t op2, vbool4_t mask, s } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 
noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vmerge_vxm_u8m2(vuint8m2_t op1, uint8_t op2, vbool4_t mask, size } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vmerge_vvm_u8m4(vuint8m4_t op1, vuint8m4_t op2, vbool2_t mask, s } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vmerge_vxm_u8m4(vuint8m4_t op1, uint8_t op2, vbool2_t mask, size } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vmerge_vvm_u8m8(vuint8m8_t op1, vuint8m8_t op2, vbool1_t mask, s } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vmerge_vxm_u8m8(vuint8m8_t op1, uint8_t op2, vbool1_t mask, size } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vmerge_vvm_u16m1(vuint16m1_t op1, vuint16m1_t op2, vbool16_t ma } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vmerge_vxm_u16m1(vuint16m1_t op1, uint16_t op2, vbool16_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vmerge_vvm_u16m2(vuint16m2_t op1, vuint16m2_t op2, vbool8_t mas } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vmerge_vxm_u16m2(vuint16m2_t op1, uint16_t op2, vbool8_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vmerge_vvm_u16m4(vuint16m4_t op1, vuint16m4_t op2, vbool4_t mas } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vmerge_vxm_u16m4(vuint16m4_t op1, uint16_t op2, vbool4_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vmerge_vvm_u16m8(vuint16m8_t op1, vuint16m8_t op2, vbool2_t mas } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vmerge_vxm_u16m8(vuint16m8_t op1, uint16_t op2, vbool2_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vmerge_vvm_u32m1(vuint32m1_t op1, vuint32m1_t op2, vbool32_t ma } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vmerge_vxm_u32m1(vuint32m1_t op1, uint32_t op2, vbool32_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vmerge_vvm_u32m2(vuint32m2_t op1, vuint32m2_t op2, vbool16_t ma } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vmerge_vxm_u32m2(vuint32m2_t op1, uint32_t op2, vbool16_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vmerge_vvm_u32m4(vuint32m4_t op1, vuint32m4_t op2, vbool8_t mas } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i32.i32.i64( 
poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vmerge_vxm_u32m4(vuint32m4_t op1, uint32_t op2, vbool8_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vmerge_vvm_u32m8(vuint32m8_t op1, vuint32m8_t op2, vbool4_t mas } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vmerge_vxm_u32m8(vuint32m8_t op1, uint32_t op2, vbool4_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vmerge_vvm_u64m1(vuint64m1_t op1, vuint64m1_t op2, vbool64_t ma } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vmerge_vxm_u64m1(vuint64m1_t op1, uint64_t op2, vbool64_t mask, } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vmerge_vvm_u64m2(vuint64m2_t op1, vuint64m2_t op2, vbool32_t ma } // CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-606,7 +607,7 @@ vuint64m2_t test_vmerge_vxm_u64m2(vuint64m2_t op1, uint64_t op2, vbool32_t mask,
}
// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u64m4
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -616,7 +617,7 @@ vuint64m4_t test_vmerge_vvm_u64m4(vuint64m4_t op1, vuint64m4_t op2, vbool16_t ma
}
// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u64m4
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -626,7 +627,7 @@ vuint64m4_t test_vmerge_vxm_u64m4(vuint64m4_t op1, uint64_t op2, vbool16_t mask,
}
// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u64m8
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -636,7 +637,7 @@ vuint64m8_t test_vmerge_vvm_u64m8(vuint64m8_t op1, vuint64m8_t op2, vbool8_t mas
}
// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u64m8
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmax.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmax.c
index 5ccea8365b3614..f82ef57646e014 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmax.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmax.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vint8m1_t test_vmax_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i8m1
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7
+27,7 @@ vint8m1_t test_vmax_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vmax_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vmax_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vmax_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vmax_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vmax_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vmax_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmax.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vmax_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vmax_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vmax_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vmax_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vmax_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vmax_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vmax_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vmax_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vmax_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vmax_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vmax_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vmax_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vmax_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t 
test_vmax_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vmax_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vmax_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vmax_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vmax_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vmax_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vmax_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmax.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vmax_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vmax_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vmax_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vmax_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vmax_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vmax_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m2_t test_vmax_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_ } // CHECK-RV64-LABEL: define dso_local 
@test_vmax_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m2_t test_vmax_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint8m4_t test_vmax_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint8m4_t test_vmax_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint8m8_t test_vmax_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint8m8_t test_vmax_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint16m1_t test_vmax_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint16m1_t test_vmax_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint16m2_t test_vmax_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint16m2_t test_vmax_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint16m4_t test_vmax_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint16m4_t test_vmax_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint16m8_t test_vmax_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint16m8_t test_vmax_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vint32m1_t test_vmax_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vint32m1_t test_vmax_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vint32m2_t test_vmax_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vint32m2_t test_vmax_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vint32m4_t test_vmax_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vint32m4_t test_vmax_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vint32m8_t test_vmax_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vint32m8_t test_vmax_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vint64m1_t test_vmax_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vint64m1_t test_vmax_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vint64m2_t test_vmax_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -606,7 +607,7 @@ vint64m2_t test_vmax_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, siz
}
// CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i64m4_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -616,7 +617,7 @@ vint64m4_t test_vmax_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2,
}
// CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i64m4_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -626,7 +627,7 @@ vint64m4_t test_vmax_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, siz
}
// CHECK-RV64-LABEL: define dso_local @test_vmax_vv_i64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -636,7 +637,7 @@ vint64m8_t test_vmax_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, s
}
// CHECK-RV64-LABEL: define dso_local @test_vmax_vx_i64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmax.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmaxu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmaxu.c
index 3592a743a0d63f..cff32016821852 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmaxu.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmaxu.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vuint8m1_t test_vmaxu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u8m1
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8
noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vmaxu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vmaxu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vmaxu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vmaxu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vmaxu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m8_t test_vmaxu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vmaxu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vmaxu_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vmaxu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vmaxu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vmaxu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vmaxu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vmaxu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vmaxu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv32i16.nxv32i16.i64( poison, [[OP1]], 
[[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vmaxu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vmaxu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vmaxu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m1_t test_vmaxu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vmaxu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t test_vmaxu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vmaxu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vmaxu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vmaxu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vmaxu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vmaxu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vmaxu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vmaxu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vmaxu_vx_u64m2(vuint64m2_t op1, 
uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vmaxu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vmaxu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vmaxu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m8_t test_vmaxu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vmaxu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vmaxu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vmaxu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vmaxu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vmaxu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vmaxu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vmaxu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vmaxu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vmaxu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vmaxu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vmaxu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vmaxu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vmaxu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vmaxu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmaxu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vmaxu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vmaxu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vmaxu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vmaxu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vmaxu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vmaxu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmaxu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vmaxu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vmaxu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vmaxu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vmaxu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vmaxu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vmaxu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv2i64.nxv2i64.i64( 
poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vmaxu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vmaxu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vmaxu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vmaxu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vmaxu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmaxu_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmaxu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmin.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmin.c index 403c6c21fa9f5a..4a257c62d79b75 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmin.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vmin.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: 
%clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vmin_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vmin_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vmin_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vmin_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vmin_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vmin_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vmin_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vmin_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vmin_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vmin_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vmin_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vmin_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vmin_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vmin_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { 
} // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vmin_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vmin_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vmin_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vmin_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vmin_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vmin_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv8i32.nxv8i32.i64( poison, 
[[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vmin_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vmin_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vmin_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vmin_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vmin_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vmin_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vmin_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vmin_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vmin_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vmin_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vmin_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vmin_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vmin_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vmin_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, 
int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m2_t test_vmin_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m2_t test_vmin_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint8m4_t test_vmin_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint8m4_t test_vmin_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint8m8_t test_vmin_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint8m8_t test_vmin_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local 
@test_vmin_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint16m1_t test_vmin_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint16m1_t test_vmin_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint16m2_t test_vmin_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint16m2_t test_vmin_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint16m4_t test_vmin_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint16m4_t test_vmin_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i16m8_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint16m8_t test_vmin_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint16m8_t test_vmin_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vint32m1_t test_vmin_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vint32m1_t test_vmin_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vint32m2_t test_vmin_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vint32m2_t test_vmin_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vint32m4_t test_vmin_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vint32m4_t test_vmin_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vint32m8_t test_vmin_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vint32m8_t test_vmin_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vint64m1_t test_vmin_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vint64m1_t test_vmin_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vint64m2_t test_vmin_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vint64m2_t test_vmin_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vint64m4_t test_vmin_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vint64m4_t test_vmin_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmin_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vint64m8_t test_vmin_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmin_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmin.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vminu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vminu.c index 84e2cc4b5ca180..5cc552ad5d8230 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vminu.c +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-min-max/thead/vminu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vminu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vminu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vminu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vminu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vminu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vminu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ 
vuint8m8_t test_vminu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vminu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vminu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vminu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vminu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vminu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vminu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vminu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vminu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vminu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vminu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m1_t test_vminu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vminu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t test_vminu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: 
define dso_local @test_vminu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vminu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vminu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vminu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vminu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vminu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vminu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv2i64.nxv2i64.i64( poison, [[OP1]], 
[[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vminu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vminu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vminu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vminu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vminu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m8_t test_vminu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vminu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vminu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vminu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vminu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vminu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vminu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vminu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vminu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vminu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vminu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vminu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vminu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vminu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vminu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vminu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vminu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vminu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vminu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vminu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vminu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vminu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vminu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vminu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vminu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vminu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vminu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vminu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vminu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vminu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vminu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vminu_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vminu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vminu_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vminu.mask.nxv8i64.i64.i64( 
poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vcpop.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vcpop.c index c8a9a8c0215706..c6c460086818d4 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vcpop.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vcpop.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ unsigned long test_vcpop_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.nxv32i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -26,7 +27,7 @@ unsigned long test_vcpop_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.nxv16i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -36,7 +37,7 @@ unsigned long test_vcpop_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.nxv8i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -46,7 +47,7 @@ unsigned long test_vcpop_m_b8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.mask.nxv64i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -56,7 +57,7 @@ unsigned long test_vcpop_m_b1_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.mask.nxv32i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -66,7 +67,7 @@ unsigned long test_vcpop_m_b2_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call i64 @llvm.riscv.th.vmpopc.mask.nxv16i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -76,7 +77,7 @@ unsigned long test_vcpop_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.mask.nxv8i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vfirst.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vfirst.c index 8af9122314c2c2..093baba960303e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vfirst.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vfirst.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ long test_vfirst_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.nxv32i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -26,7 +27,7 @@ long test_vfirst_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.nxv16i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -36,7 +37,7 @@ long test_vfirst_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.nxv8i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -46,7 +47,7 @@ long test_vfirst_m_b8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.mask.nxv64i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -56,7 +57,7 @@ long test_vfirst_m_b1_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.mask.nxv32i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -66,7 +67,7 @@ long test_vfirst_m_b2_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.mask.nxv16i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -76,7 +77,7 @@ long test_vfirst_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.mask.nxv8i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vid.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vid.c index 9d535ce30ad471..9f3b919759103b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vid.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vid.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vid_v_u8m1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv16i8.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_vid_v_u8m2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv32i8.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_vid_v_u8m4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv64i8.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_vid_v_u8m8(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m1 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv4i16.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_vid_v_u16m1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m2 -// CHECK-RV64-SAME: 
(i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv8i16.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_vid_v_u16m2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv16i16.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_vid_v_u16m4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv32i16.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_vid_v_u16m8(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m1 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv2i32.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_vid_v_u32m1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv4i32.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_vid_v_u32m2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv8i32.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_vid_v_u32m4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv16i32.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_vid_v_u32m8(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m1 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv1i64.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_vid_v_u64m1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv2i64.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_vid_v_u64m2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m4 -// 
CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv4i64.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_vid_v_u64m4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv8i64.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m8_t test_vid_v_u64m8(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv8i8.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vid_v_u8m1_m(vbool8_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv16i8.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m2_t test_vid_v_u8m2_m(vbool4_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv32i8.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m4_t test_vid_v_u8m4_m(vbool2_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv64i8.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m8_t test_vid_v_u8m8_m(vbool1_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv4i16.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vid_v_u16m1_m(vbool16_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv8i16.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m2_t test_vid_v_u16m2_m(vbool8_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m4_m -// 
CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv16i16.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m4_t test_vid_v_u16m4_m(vbool4_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv32i16.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m8_t test_vid_v_u16m8_m(vbool2_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv2i32.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vid_v_u32m1_m(vbool32_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv4i32.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m2_t test_vid_v_u32m2_m(vbool16_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv8i32.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m4_t test_vid_v_u32m4_m(vbool8_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv16i32.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m8_t test_vid_v_u32m8_m(vbool4_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv1i64.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vid_v_u64m1_m(vbool64_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv2i64.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-306,7 +307,7 @@ vuint64m2_t test_vid_v_u64m2_m(vbool32_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv4i64.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m4_t test_vid_v_u64m4_m(vbool16_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv8i64.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/viota.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/viota.c index 44daddba984e27..5a0a5c051d9020 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/viota.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/viota.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_viota_m_u8m1(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv16i8.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_viota_m_u8m2(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv32i8.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_viota_m_u8m4(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv64i8.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_viota_m_u8m8(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv4i16.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_viota_m_u16m1(vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_viota_m_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv8i16.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_viota_m_u16m2(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv16i16.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_viota_m_u16m4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv32i16.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_viota_m_u16m8(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv2i32.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_viota_m_u32m1(vbool32_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv4i32.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_viota_m_u32m2(vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv8i32.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_viota_m_u32m4(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv16i32.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_viota_m_u32m8(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv1i64.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t 
test_viota_m_u64m1(vbool64_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv2i64.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_viota_m_u64m2(vbool32_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv4i64.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_viota_m_u64m4(vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv8i64.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m8_t test_viota_m_u64m8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv8i8.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_viota_m_u8m1_m(vbool8_t mask, vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv16i8.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m2_t test_viota_m_u8m2_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv32i8.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m4_t test_viota_m_u8m4_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv64i8.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m8_t test_viota_m_u8m8_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m1_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv4i16.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_viota_m_u16m1_m(vbool16_t mask, vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv8i16.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m2_t test_viota_m_u16m2_m(vbool8_t mask, vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv16i16.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m4_t test_viota_m_u16m4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv32i16.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m8_t test_viota_m_u16m8_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv2i32.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_viota_m_u32m1_m(vbool32_t mask, vbool32_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv4i32.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m2_t test_viota_m_u32m2_m(vbool16_t mask, vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv8i32.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m4_t test_viota_m_u32m4_m(vbool8_t mask, vbool8_t op1, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv16i32.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m8_t test_viota_m_u32m8_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv1i64.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_viota_m_u64m1_m(vbool64_t mask, vbool64_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv2i64.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_viota_m_u64m2_m(vbool32_t mask, vbool32_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv4i64.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m4_t test_viota_m_u64m4_m(vbool16_t mask, vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv8i64.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmand.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmand.c index 4f5bacf2db0e6d..4808989001aa14 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmand.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmand.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmand_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmand_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmand_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmand_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmand_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmand_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmandnot.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmandnot.c index 63e457bc299b2b..66ac7146e57f57 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmandnot.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmandnot.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmandnot_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmandnot_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmandnot.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmandnot_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmandnot_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmandnot.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmandnot_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmandnot_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmandnot.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmclr.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmclr.c index 094f1f9cfd9081..25a0241334f118 100644 --- 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmclr.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmclr.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmclr_m_b1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmclr_m_b2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmclr.nxv32i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmclr_m_b2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmclr_m_b4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmclr.nxv16i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmclr_m_b4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmclr_m_b8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmclr.nxv8i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmmv.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmmv.c index 9cfc08904f95a8..eb5ac5c96ef41d 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmmv.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmmv.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmmv_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmmv_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv32i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmmv_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmmv_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv16i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmmv_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmmv_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmand.nxv8i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnand.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnand.c index ced1540b013458..e8ff4e1ccf0f01 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnand.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnand.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmnand_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnand_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmnand_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnand_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmnand_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnand_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnor.c index 3db94e1b0aa16a..af2fa0afd90e5d 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmnor_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnor_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnor.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmnor_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vmnor_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnor.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmnor_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnor_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnor.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnot.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnot.c index a4e07f4eea5ad1..699f887ae50485 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnot.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmnot.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmnot_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnot_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv32i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmnot_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnot_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv16i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmnot_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnot_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv8i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmor.c index 5831fd5eefc31c..d50120b80fe25e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone 
-emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmor_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmor_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmor.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmor_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmor_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmor.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmor_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmor_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmor.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmornot.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmornot.c index bbd82322c7a0cf..46292718063555 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmornot.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmornot.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmornot_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmornot_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmornot.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmornot_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmornot_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmornot.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmornot_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmornot_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmornot.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsbf.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsbf.c index 9de6fb577f2d6f..650169fa334c41 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsbf.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsbf.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmsbf_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.nxv32i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmsbf_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.nxv16i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsbf_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.nxv8i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool8_t test_vmsbf_m_b8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.mask.nxv64i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool1_t test_vmsbf_m_b1_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.mask.nxv32i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsbf_m_b2_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsbf.mask.nxv16i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool4_t test_vmsbf_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.mask.nxv8i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmset.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmset.c index 43074ae051105d..e79aec1f016fc1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmset.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmset.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmset_m_b1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmset_m_b2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmset.nxv32i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmset_m_b2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmset_m_b4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmset.nxv16i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmset_m_b4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmset_m_b8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmset.nxv8i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsif.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsif.c index d4dbe130e81527..6765dd7cbfd781 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsif.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsif.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmsif_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.nxv32i1.i64( [[OP1]], 
i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmsif_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.nxv16i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsif_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.nxv8i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool8_t test_vmsif_m_b8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.mask.nxv64i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool1_t test_vmsif_m_b1_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.mask.nxv32i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsif_m_b2_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.mask.nxv16i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool4_t test_vmsif_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.mask.nxv8i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsof.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsof.c index 2bab34f333c63a..81ac1129a77b92 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsof.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmsof.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: 
-disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmsof_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.nxv32i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmsof_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.nxv16i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsof_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.nxv8i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool8_t test_vmsof_m_b8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.mask.nxv64i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool1_t test_vmsof_m_b1_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.mask.nxv32i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsof_m_b2_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.mask.nxv16i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool4_t test_vmsof_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.mask.nxv8i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmxnor.c 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmxnor.c index 24fbc72dd2f873..24e5d436aaa729 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmxnor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmxnor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmxnor_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxnor_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxnor.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmxnor_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxnor_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxnor.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmxnor_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxnor_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxnor.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmxor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmxor.c index 060ddc0152bb1c..982a8d00bf3152 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmxor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/thead/vmxor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmxor_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxor_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxor.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmxor_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxor_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxor.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmxor_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxor_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxor.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vcpop.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vcpop.c index 97d42071b40541..9261b44f4730fd 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vcpop.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vcpop.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ unsigned long test_vcpop_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.nxv32i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -26,7 +27,7 @@ unsigned long test_vcpop_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.nxv16i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -36,7 +37,7 @@ unsigned long test_vcpop_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.nxv8i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -46,7 +47,7 @@ unsigned long test_vcpop_m_b8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.mask.nxv64i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -56,7 +57,7 @@ unsigned long test_vcpop_m_b1_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.mask.nxv32i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -66,7 +67,7 @@ unsigned long test_vcpop_m_b2_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.mask.nxv16i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -76,7 +77,7 @@ unsigned long test_vcpop_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.mask.nxv8i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vfirst.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vfirst.c index 3d7ae9f2a3a6cf..a64d96fe6f6f2c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vfirst.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vfirst.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ long test_vfirst_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.nxv32i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -26,7 +27,7 @@ long test_vfirst_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.nxv16i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -36,7 +37,7 @@ long test_vfirst_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.nxv8i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -46,7 +47,7 @@ long test_vfirst_m_b8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.mask.nxv64i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -56,7 +57,7 @@ long test_vfirst_m_b1_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.mask.nxv32i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -66,7 +67,7 @@ long test_vfirst_m_b2_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.mask.nxv16i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] @@ -76,7 +77,7 @@ long test_vfirst_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local i64 @test_vfirst_m_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmfirst.mask.nxv8i1.i64( [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret i64 [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vid.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vid.c index eaaf80cb1b190d..b7bb255be36bba 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vid.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vid.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vid_v_u8m1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv16i8.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_vid_v_u8m2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv32i8.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_vid_v_u8m4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vid.nxv64i8.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_vid_v_u8m8(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m1 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv4i16.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_vid_v_u16m1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv8i16.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_vid_v_u16m2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv16i16.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_vid_v_u16m4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv32i16.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_vid_v_u16m8(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m1 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv2i32.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_vid_v_u32m1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv4i32.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_vid_v_u32m2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv8i32.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_vid_v_u32m4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv16i32.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_vid_v_u32m8(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m1 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] 
= call @llvm.riscv.th.vid.nxv1i64.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_vid_v_u64m1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv2i64.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_vid_v_u64m2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv4i64.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_vid_v_u64m4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.nxv8i64.i64( poison, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m8_t test_vid_v_u64m8(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv8i8.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vid_v_u8m1_m(vbool8_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv16i8.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m2_t test_vid_v_u8m2_m(vbool4_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv32i8.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m4_t test_vid_v_u8m4_m(vbool2_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv64i8.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m8_t test_vid_v_u8m8_m(vbool1_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv4i16.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vid_v_u16m1_m(vbool16_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv8i16.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m2_t test_vid_v_u16m2_m(vbool8_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv16i16.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m4_t test_vid_v_u16m4_m(vbool4_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv32i16.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m8_t test_vid_v_u16m8_m(vbool2_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv2i32.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vid_v_u32m1_m(vbool32_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv4i32.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m2_t test_vid_v_u32m2_m(vbool16_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv8i32.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m4_t test_vid_v_u32m4_m(vbool8_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv16i32.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m8_t test_vid_v_u32m8_m(vbool4_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv1i64.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vid_v_u64m1_m(vbool64_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv2i64.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_vid_v_u64m2_m(vbool32_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv4i64.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m4_t test_vid_v_u64m4_m(vbool16_t mask, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vid_v_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vid.mask.nxv8i64.i64( poison, [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/viota.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/viota.c index c8a3ec761408a9..6a8f4b9c4fa123 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/viota.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/viota.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_viota_m_u8m1(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv16i8.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m2_t test_viota_m_u8m2(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv32i8.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m4_t test_viota_m_u8m4(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv64i8.i64( 
poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m8_t test_viota_m_u8m8(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv4i16.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_viota_m_u16m1(vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv8i16.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m2_t test_viota_m_u16m2(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv16i16.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_viota_m_u16m4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv32i16.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m8_t test_viota_m_u16m8(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv2i32.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_viota_m_u32m1(vbool32_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv4i32.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m2_t test_viota_m_u32m2(vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv8i32.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m4_t test_viota_m_u32m4(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv16i32.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_viota_m_u32m8(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv1i64.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_viota_m_u64m1(vbool64_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv2i64.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_viota_m_u64m2(vbool32_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv4i64.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_viota_m_u64m4(vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.nxv8i64.i64( poison, [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m8_t test_viota_m_u64m8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv8i8.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_viota_m_u8m1_m(vbool8_t mask, vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv16i8.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m2_t test_viota_m_u8m2_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv32i8.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m4_t test_viota_m_u8m4_m(vbool2_t mask, vbool2_t 
op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv64i8.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m8_t test_viota_m_u8m8_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv4i16.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_viota_m_u16m1_m(vbool16_t mask, vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv8i16.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m2_t test_viota_m_u16m2_m(vbool8_t mask, vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv16i16.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m4_t test_viota_m_u16m4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv32i16.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m8_t test_viota_m_u16m8_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv2i32.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_viota_m_u32m1_m(vbool32_t mask, vbool32_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv4i32.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 
@@ vuint32m2_t test_viota_m_u32m2_m(vbool16_t mask, vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv8i32.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m4_t test_viota_m_u32m4_m(vbool8_t mask, vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv16i32.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m8_t test_viota_m_u32m8_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv1i64.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_viota_m_u64m1_m(vbool64_t mask, vbool64_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv2i64.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_viota_m_u64m2_m(vbool32_t mask, vbool32_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv4i64.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m4_t test_viota_m_u64m4_m(vbool16_t mask, vbool16_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_viota_m_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.viota.mask.nxv8i64.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmand.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmand.c index 7b8c5714013653..24b8cea891519d 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmand.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmand.c @@ -1,3 +1,4 @@ +// 
NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmand_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmand_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmand_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmand_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmand_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmand_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmandnot.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmandnot.c index 182b634852f365..6351febc4efff5 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmandnot.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmandnot.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmandnot_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmandnot_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmandnot.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmandnot_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmandnot_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmandnot.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmandnot_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vmandnot_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmandnot.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmclr.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmclr.c index f659423d2681df..eda9016afbd81d 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmclr.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmclr.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmclr_m_b1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmclr_m_b2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmclr.nxv32i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmclr_m_b2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmclr_m_b4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmclr.nxv16i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmclr_m_b4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmclr_m_b8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmclr.nxv8i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmmv.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmmv.c index 06acd20e0e0b40..d79e298c329928 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmmv.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmmv.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmmv_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmmv_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv32i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmmv_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmmv_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv16i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmmv_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmmv_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmand.nxv8i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnand.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnand.c index ac11dca9fc41d0..5b45810f6ecc15 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnand.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnand.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmnand_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnand_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmnand_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnand_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmnand_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnand_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnor.c index dad841975a49af..4fda9ccc833663 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone 
-emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmnor_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnor_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnor.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmnor_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnor_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnor.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmnor_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnor_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnor.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnot.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnot.c index c7ed3fda8fcbb7..e2fd6321256b36 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnot.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmnot.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmnot_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnot_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv32i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmnot_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnot_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv16i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmnot_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmnot_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmnand.nxv8i1.i64( [[OP1]], [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmor.c index 560adeadbc4442..97e1bc1e03a4fb 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmor_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmor_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmor.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmor_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmor_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmor.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmor_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmor_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmor.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmornot.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmornot.c index 942f2fe8aae703..26e4e7d715801b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmornot.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmornot.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmornot_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmornot_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmornot.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmornot_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmornot_mm_b4 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmornot.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmornot_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmornot_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmornot.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsbf.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsbf.c index 636a88dd6d987f..a7d66e4c394d49 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsbf.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsbf.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmsbf_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.nxv32i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmsbf_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.nxv16i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsbf_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.nxv8i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool8_t test_vmsbf_m_b8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.mask.nxv64i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool1_t test_vmsbf_m_b1_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.mask.nxv32i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsbf_m_b2_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.mask.nxv16i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool4_t test_vmsbf_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsbf_m_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsbf.mask.nxv8i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmset.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmset.c index 1b8f90240d49a0..6cc8a7900dcd4b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmset.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmset.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmset_m_b1(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmset_m_b2 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmset.nxv32i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmset_m_b2(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmset_m_b4 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmset.nxv16i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmset_m_b4(size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmset_m_b8 -// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmset.nxv8i1.i64(i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsif.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsif.c index 095f4b4486123f..2a0e898021baf0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsif.c +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsif.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmsif_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.nxv32i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmsif_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.nxv16i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsif_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.nxv8i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool8_t test_vmsif_m_b8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.mask.nxv64i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool1_t test_vmsif_m_b1_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.mask.nxv32i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsif_m_b2_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsif.mask.nxv16i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool4_t test_vmsif_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsif_m_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] 
= call @llvm.riscv.th.vmsif.mask.nxv8i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsof.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsof.c index 2f63b9601b4116..bc7e25f7972c98 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsof.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmsof.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmsof_m_b1(vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.nxv32i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmsof_m_b2(vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.nxv16i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmsof_m_b4(vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.nxv8i1.i64( [[OP1]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vbool8_t test_vmsof_m_b8(vbool8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.mask.nxv64i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vbool1_t test_vmsof_m_b1_m(vbool1_t mask, vbool1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.mask.nxv32i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vbool2_t test_vmsof_m_b2_m(vbool2_t mask, vbool2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsof.mask.nxv16i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vbool4_t test_vmsof_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmsof_m_b8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsof.mask.nxv8i1.i64( poison, [[OP1]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmxnor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmxnor.c index e90578fd747467..d46b5600c3f967 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmxnor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmxnor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t test_vmxnor_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxnor_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxnor.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmxnor_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxnor_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxnor.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmxnor_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxnor_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxnor.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmxor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmxor.c index be0cd3439d4d0c..eb920317f9d577 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmxor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-mask-logical/wrappers/vmxor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vbool1_t 
test_vmxor_mm_b1(vbool1_t op1, vbool1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxor_mm_b2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxor.nxv32i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vbool2_t test_vmxor_mm_b2(vbool2_t op1, vbool2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxor_mm_b4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxor.nxv16i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vbool4_t test_vmxor_mm_b4(vbool4_t op1, vbool4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmxor_mm_b8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmxor.nxv8i1.i64( [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-fixed-point-clip/thead/vnclip.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-fixed-point-clip/thead/vnclip.c index 548ce86a9c20bb..4af489d2f3226b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-fixed-point-clip/thead/vnclip.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-fixed-point-clip/thead/vnclip.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vnclip_wv_i8m1(vint16m2_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv8i8.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vnclip_wx_i8m1(vint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv16i8.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vnclip_wv_i8m2(vint16m4_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv16i8.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vnclip_wx_i8m2(vint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv32i8.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vnclip_wv_i8m4(vint16m8_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv32i8.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vnclip_wx_i8m4(vint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv4i16.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vnclip_wv_i16m1(vint32m2_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv4i16.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vnclip_wx_i16m1(vint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv8i16.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m2_t test_vnclip_wv_i16m2(vint32m4_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv8i16.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m2_t test_vnclip_wx_i16m2(vint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv16i16.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m4_t test_vnclip_wv_i16m4(vint32m8_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv16i16.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m4_t test_vnclip_wx_i16m4(vint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv2i32.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint32m1_t test_vnclip_wv_i32m1(vint64m2_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv2i32.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint32m1_t test_vnclip_wx_i32m1(vint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv4i32.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint32m2_t test_vnclip_wv_i32m2(vint64m4_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv4i32.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint32m2_t test_vnclip_wx_i32m2(vint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], 
[[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv8i32.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m4_t test_vnclip_wv_i32m4(vint64m8_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.nxv8i32.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m4_t test_vnclip_wx_i32m4(vint64m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv8i8.nxv8i16.nxv8i8.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint8m1_t test_vnclip_wv_i8m1_m(vbool8_t mask, vint16m2_t op1, vuint8m1_t shift, } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv8i8.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint8m1_t test_vnclip_wx_i8m1_m(vbool8_t mask, vint16m2_t op1, size_t shift, siz } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv16i8.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint8m2_t test_vnclip_wv_i8m2_m(vbool4_t mask, vint16m4_t op1, vuint8m2_t shift, } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv16i8.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint8m2_t test_vnclip_wx_i8m2_m(vbool4_t mask, vint16m4_t op1, size_t shift, siz } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv32i8.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint8m4_t test_vnclip_wv_i8m4_m(vbool2_t mask, vint16m8_t op1, vuint8m4_t shift, } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv32i8.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint8m4_t test_vnclip_wx_i8m4_m(vbool2_t mask, vint16m8_t op1, size_t shift, siz } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv4i16.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint16m1_t test_vnclip_wv_i16m1_m(vbool16_t mask, vint32m2_t op1, vuint16m1_t sh } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv4i16.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint16m1_t test_vnclip_wx_i16m1_m(vbool16_t mask, vint32m2_t op1, size_t shift, } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv8i16.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint16m2_t test_vnclip_wv_i16m2_m(vbool8_t mask, vint32m4_t op1, vuint16m2_t shi } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv8i16.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint16m2_t test_vnclip_wx_i16m2_m(vbool8_t mask, vint32m4_t op1, size_t shift, s } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i16m4_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv16i16.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint16m4_t test_vnclip_wv_i16m4_m(vbool4_t mask, vint32m8_t op1, vuint16m4_t shi } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv16i16.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint16m4_t test_vnclip_wx_i16m4_m(vbool4_t mask, vint32m8_t op1, size_t shift, s } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv2i32.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint32m1_t test_vnclip_wv_i32m1_m(vbool32_t mask, vint64m2_t op1, vuint32m1_t sh } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv2i32.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint32m1_t test_vnclip_wx_i32m1_m(vbool32_t mask, vint64m2_t op1, size_t shift, } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv4i32.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint32m2_t test_vnclip_wv_i32m2_m(vbool16_t mask, vint64m4_t op1, vuint32m2_t sh } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv4i32.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint32m2_t test_vnclip_wx_i32m2_m(vbool16_t mask, vint64m4_t op1, size_t shift, } // CHECK-RV64-LABEL: define 
dso_local @test_vnclip_wv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv8i32.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint32m4_t test_vnclip_wv_i32m4_m(vbool8_t mask, vint64m8_t op1, vuint32m4_t shi } // CHECK-RV64-LABEL: define dso_local @test_vnclip_wx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclip.mask.nxv8i32.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-fixed-point-clip/thead/vnclipu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-fixed-point-clip/thead/vnclipu.c index 9dedef37b53f90..bb78915486ccb2 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-fixed-point-clip/thead/vnclipu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-fixed-point-clip/thead/vnclipu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vnclipu_wv_u8m1(vuint16m2_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv8i8.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vnclipu_wx_u8m1(vuint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv16i8.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vnclipu_wv_u8m2(vuint16m4_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv16i8.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t 
test_vnclipu_wx_u8m2(vuint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv32i8.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vnclipu_wv_u8m4(vuint16m8_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv32i8.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vnclipu_wx_u8m4(vuint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv4i16.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m1_t test_vnclipu_wv_u16m1(vuint32m2_t op1, vuint16m1_t shift, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv4i16.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m1_t test_vnclipu_wx_u16m1(vuint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv8i16.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m2_t test_vnclipu_wv_u16m2(vuint32m4_t op1, vuint16m2_t shift, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv8i16.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m2_t test_vnclipu_wx_u16m2(vuint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv16i16.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m4_t test_vnclipu_wv_u16m4(vuint32m8_t op1, vuint16m4_t shift, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv16i16.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m4_t test_vnclipu_wx_u16m4(vuint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv2i32.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint32m1_t test_vnclipu_wv_u32m1(vuint64m2_t op1, vuint32m1_t shift, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv2i32.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint32m1_t test_vnclipu_wx_u32m1(vuint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv4i32.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint32m2_t test_vnclipu_wv_u32m2(vuint64m4_t op1, vuint32m2_t shift, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv4i32.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint32m2_t test_vnclipu_wx_u32m2(vuint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv8i32.nxv8i64.nxv8i32.i64( poison, 
[[OP1]], [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m4_t test_vnclipu_wv_u32m4(vuint64m8_t op1, vuint32m4_t shift, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.nxv8i32.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m4_t test_vnclipu_wx_u32m4(vuint64m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv8i8.nxv8i16.nxv8i8.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vnclipu_wv_u8m1_m(vbool8_t mask, vuint16m2_t op1, vuint8m1_t shi } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv8i8.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vnclipu_wx_u8m1_m(vbool8_t mask, vuint16m2_t op1, size_t shift, } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv16i8.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint8m2_t test_vnclipu_wv_u8m2_m(vbool4_t mask, vuint16m4_t op1, vuint8m2_t shi } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv16i8.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint8m2_t test_vnclipu_wx_u8m2_m(vbool4_t mask, vuint16m4_t op1, size_t shift, } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv32i8.nxv32i16.nxv32i8.i64( poison, 
[[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint8m4_t test_vnclipu_wv_u8m4_m(vbool2_t mask, vuint16m8_t op1, vuint8m4_t shi } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv32i8.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint8m4_t test_vnclipu_wx_u8m4_m(vbool2_t mask, vuint16m8_t op1, size_t shift, } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv4i16.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint16m1_t test_vnclipu_wv_u16m1_m(vbool16_t mask, vuint32m2_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv4i16.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint16m1_t test_vnclipu_wx_u16m1_m(vbool16_t mask, vuint32m2_t op1, size_t shif } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv8i16.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint16m2_t test_vnclipu_wv_u16m2_m(vbool8_t mask, vuint32m4_t op1, vuint16m2_t } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv8i16.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint16m2_t test_vnclipu_wx_u16m2_m(vbool8_t mask, vuint32m4_t op1, size_t shift } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vnclipu.mask.nxv16i16.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint16m4_t test_vnclipu_wv_u16m4_m(vbool4_t mask, vuint32m8_t op1, vuint16m4_t } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv16i16.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint16m4_t test_vnclipu_wx_u16m4_m(vbool4_t mask, vuint32m8_t op1, size_t shift } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv2i32.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint32m1_t test_vnclipu_wv_u32m1_m(vbool32_t mask, vuint64m2_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv2i32.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint32m1_t test_vnclipu_wx_u32m1_m(vbool32_t mask, vuint64m2_t op1, size_t shif } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv4i32.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint32m2_t test_vnclipu_wv_u32m2_m(vbool16_t mask, vuint64m4_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv4i32.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint32m2_t test_vnclipu_wx_u32m2_m(vbool16_t mask, vuint64m4_t op1, size_t shif } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
// CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv8i32.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint32m4_t test_vnclipu_wv_u32m4_m(vbool8_t mask, vuint64m8_t op1, vuint32m4_t } // CHECK-RV64-LABEL: define dso_local @test_vnclipu_wx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnclipu.mask.nxv8i32.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], [[MASK]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/thead/vnsra.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/thead/vnsra.c index 13276fd39ad39d..ec39aad5a172c5 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/thead/vnsra.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/thead/vnsra.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vnsra_wv_i8m1(vint16m2_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv8i8.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vnsra_wx_i8m1(vint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv16i8.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vnsra_wv_i8m2(vint16m4_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv16i8.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vnsra_wx_i8m2(vint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv32i8.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vnsra_wv_i8m4(vint16m8_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv32i8.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vnsra_wx_i8m4(vint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv4i16.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vnsra_wv_i16m1(vint32m2_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv4i16.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vnsra_wx_i16m1(vint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv8i16.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m2_t test_vnsra_wv_i16m2(vint32m4_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv8i16.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m2_t test_vnsra_wx_i16m2(vint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv16i16.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m4_t test_vnsra_wv_i16m4(vint32m8_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i16m4 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv16i16.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m4_t test_vnsra_wx_i16m4(vint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv2i32.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint32m1_t test_vnsra_wv_i32m1(vint64m2_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv2i32.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint32m1_t test_vnsra_wx_i32m1(vint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv4i32.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint32m2_t test_vnsra_wv_i32m2(vint64m4_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv4i32.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint32m2_t test_vnsra_wx_i32m2(vint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv8i32.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m4_t test_vnsra_wv_i32m4(vint64m8_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vnsra.nxv8i32.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/thead/vnsrl.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/thead/vnsrl.c index 9dccde214c1bb5..c9e382d6eda2b0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/thead/vnsrl.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/thead/vnsrl.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vnsrl_wv_u8m1(vuint16m2_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv8i8.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vnsrl_wx_u8m1(vuint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv16i8.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vnsrl_wv_u8m2(vuint16m4_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv16i8.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vnsrl_wx_u8m2(vuint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv32i8.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vnsrl_wv_u8m4(vuint16m8_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv32i8.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-66,7 +67,7 @@ vuint8m4_t test_vnsrl_wx_u8m4(vuint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv4i16.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m1_t test_vnsrl_wv_u16m1(vuint32m2_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv4i16.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m1_t test_vnsrl_wx_u16m1(vuint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv8i16.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m2_t test_vnsrl_wv_u16m2(vuint32m4_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv8i16.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m2_t test_vnsrl_wx_u16m2(vuint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv16i16.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m4_t test_vnsrl_wv_u16m4(vuint32m8_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv16i16.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m4_t test_vnsrl_wx_u16m4(vuint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], 
[[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv2i32.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint32m1_t test_vnsrl_wv_u32m1(vuint64m2_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv2i32.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint32m1_t test_vnsrl_wx_u32m1(vuint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv4i32.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint32m2_t test_vnsrl_wv_u32m2(vuint64m4_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv4i32.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint32m2_t test_vnsrl_wx_u32m2(vuint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv8i32.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m4_t test_vnsrl_wv_u32m4(vuint64m8_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv8i32.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/wrappers/vnsra.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/wrappers/vnsra.c index 73e4e314b497c9..9d93766594d4f4 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/wrappers/vnsra.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/wrappers/vnsra.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // 
RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vnsra_wv_i8m1(vint16m2_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv8i8.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vnsra_wx_i8m1(vint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv16i8.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vnsra_wv_i8m2(vint16m4_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv16i8.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vnsra_wx_i8m2(vint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv32i8.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vnsra_wv_i8m4(vint16m8_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv32i8.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vnsra_wx_i8m4(vint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv4i16.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vnsra_wv_i16m1(vint32m2_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i16m1 -// CHECK-RV64-SAME: 
( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv4i16.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vnsra_wx_i16m1(vint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv8i16.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m2_t test_vnsra_wv_i16m2(vint32m4_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv8i16.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m2_t test_vnsra_wx_i16m2(vint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv16i16.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m4_t test_vnsra_wv_i16m4(vint32m8_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv16i16.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m4_t test_vnsra_wx_i16m4(vint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv2i32.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint32m1_t test_vnsra_wv_i32m1(vint64m2_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv2i32.nxv2i64.i64.i64( 
poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint32m1_t test_vnsra_wx_i32m1(vint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv4i32.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint32m2_t test_vnsra_wv_i32m2(vint64m4_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv4i32.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint32m2_t test_vnsra_wx_i32m2(vint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv8i32.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m4_t test_vnsra_wv_i32m4(vint64m8_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsra_wx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsra.nxv8i32.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/wrappers/vnsrl.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/wrappers/vnsrl.c index f441a35f3a5f07..e82f6874996fc7 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/wrappers/vnsrl.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-narrowing-shift/wrappers/vnsrl.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vnsrl_wv_u8m1(vuint16m2_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv8i8.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t 
test_vnsrl_wx_u8m1(vuint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv16i8.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vnsrl_wv_u8m2(vuint16m4_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv16i8.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vnsrl_wx_u8m2(vuint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv32i8.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vnsrl_wv_u8m4(vuint16m8_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv32i8.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vnsrl_wx_u8m4(vuint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv4i16.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m1_t test_vnsrl_wv_u16m1(vuint32m2_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv4i16.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m1_t test_vnsrl_wx_u16m1(vuint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv8i16.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m2_t test_vnsrl_wv_u16m2(vuint32m4_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv8i16.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m2_t test_vnsrl_wx_u16m2(vuint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv16i16.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m4_t test_vnsrl_wv_u16m4(vuint32m8_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv16i16.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m4_t test_vnsrl_wx_u16m4(vuint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv2i32.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint32m1_t test_vnsrl_wv_u32m1(vuint64m2_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv2i32.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint32m1_t test_vnsrl_wx_u32m1(vuint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv4i32.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint32m2_t test_vnsrl_wv_u32m2(vuint64m4_t op1, vuint32m2_t shift, size_t 
vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv4i32.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint32m2_t test_vnsrl_wx_u32m2(vuint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv8i32.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m4_t test_vnsrl_wv_u32m4(vuint64m8_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vnsrl_wx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnsrl.nxv8i32.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredmax.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredmax.c index 2804c2dfbc64d0..1b341b8b364e25 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredmax.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredmax.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_vfredmax_vs_f16m1_f16m1(vfloat16m1_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m2_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m1_t test_vfredmax_vs_f16m2_f16m1(vfloat16m2_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m4_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m1_t test_vfredmax_vs_f16m4_f16m1(vfloat16m4_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m8_f16m1 -// 
CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m1_t test_vfredmax_vs_f16m8_f16m1(vfloat16m8_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m1_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m1_t test_vfredmax_vs_f32m1_f32m1(vfloat32m1_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat32m1_t test_vfredmax_vs_f32m2_f32m1(vfloat32m2_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat32m1_t test_vfredmax_vs_f32m4_f32m1(vfloat32m4_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat32m1_t test_vfredmax_vs_f32m8_f32m1(vfloat32m8_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m1_t test_vfredmax_vs_f64m1_f64m1(vfloat64m1_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vfredmax.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat64m1_t test_vfredmax_vs_f64m2_f64m1(vfloat64m2_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat64m1_t test_vfredmax_vs_f64m4_f64m1(vfloat64m4_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat64m1_t test_vfredmax_vs_f64m8_f64m1(vfloat64m8_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m1_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv4f16.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat16m1_t test_vfredmax_vs_f16m1_f16m1_m(vbool16_t mask, vfloat16m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m2_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat16m1_t test_vfredmax_vs_f16m2_f16m1_m(vbool8_t mask, vfloat16m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m4_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat16m1_t test_vfredmax_vs_f16m4_f16m1_m(vbool4_t mask, vfloat16m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m8_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], 
[[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vfloat16m1_t test_vfredmax_vs_f16m8_f16m1_m(vbool2_t mask, vfloat16m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vfloat32m1_t test_vfredmax_vs_f32m1_f32m1_m(vbool32_t mask, vfloat32m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vfloat32m1_t test_vfredmax_vs_f32m2_f32m1_m(vbool16_t mask, vfloat32m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vfloat32m1_t test_vfredmax_vs_f32m4_f32m1_m(vbool8_t mask, vfloat32m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vfloat32m1_t test_vfredmax_vs_f32m8_f32m1_m(vbool4_t mask, vfloat32m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vfloat64m1_t test_vfredmax_vs_f64m1_f64m1_m(vbool64_t mask, vfloat64m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv1f64.nxv2f64.i64( poison, 
[[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vfloat64m1_t test_vfredmax_vs_f64m2_f64m1_m(vbool32_t mask, vfloat64m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vfloat64m1_t test_vfredmax_vs_f64m4_f64m1_m(vbool16_t mask, vfloat64m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredmin.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredmin.c index aeffaaf3cb3507..a6caa0dca28685 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredmin.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredmin.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_vfredmin_vs_f16m1_f16m1(vfloat16m1_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m2_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m1_t test_vfredmin_vs_f16m2_f16m1(vfloat16m2_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m4_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m1_t test_vfredmin_vs_f16m4_f16m1(vfloat16m4_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m8_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vfredmin.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m1_t test_vfredmin_vs_f16m8_f16m1(vfloat16m8_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m1_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m1_t test_vfredmin_vs_f32m1_f32m1(vfloat32m1_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat32m1_t test_vfredmin_vs_f32m2_f32m1(vfloat32m2_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat32m1_t test_vfredmin_vs_f32m4_f32m1(vfloat32m4_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat32m1_t test_vfredmin_vs_f32m8_f32m1(vfloat32m8_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m1_t test_vfredmin_vs_f64m1_f64m1(vfloat64m1_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat64m1_t test_vfredmin_vs_f64m2_f64m1(vfloat64m2_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define 
dso_local @test_vfredmin_vs_f64m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat64m1_t test_vfredmin_vs_f64m4_f64m1(vfloat64m4_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat64m1_t test_vfredmin_vs_f64m8_f64m1(vfloat64m8_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m1_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv4f16.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat16m1_t test_vfredmin_vs_f16m1_f16m1_m(vbool16_t mask, vfloat16m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m2_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat16m1_t test_vfredmin_vs_f16m2_f16m1_m(vbool8_t mask, vfloat16m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m4_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat16m1_t test_vfredmin_vs_f16m4_f16m1_m(vbool4_t mask, vfloat16m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m8_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vfloat16m1_t test_vfredmin_vs_f16m8_f16m1_m(vbool2_t mask, vfloat16m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m1_f32m1_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vfloat32m1_t test_vfredmin_vs_f32m1_f32m1_m(vbool32_t mask, vfloat32m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vfloat32m1_t test_vfredmin_vs_f32m2_f32m1_m(vbool16_t mask, vfloat32m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vfloat32m1_t test_vfredmin_vs_f32m4_f32m1_m(vbool8_t mask, vfloat32m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vfloat32m1_t test_vfredmin_vs_f32m8_f32m1_m(vbool4_t mask, vfloat32m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vfloat64m1_t test_vfredmin_vs_f64m1_f64m1_m(vbool64_t mask, vfloat64m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vfloat64m1_t test_vfredmin_vs_f64m2_f64m1_m(vbool32_t mask, vfloat64m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m4_f64m1_m 
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vfloat64m1_t test_vfredmin_vs_f64m4_f64m1_m(vbool16_t mask, vfloat64m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredosum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredosum.c index a6dbb47d2cf968..67f8315dee62da 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredosum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredosum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_vfredosum_vs_f16m1_f16m1(vfloat16m1_t vector, vfloat16m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m2_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m1_t test_vfredosum_vs_f16m2_f16m1(vfloat16m2_t vector, vfloat16m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m4_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m1_t test_vfredosum_vs_f16m4_f16m1(vfloat16m4_t vector, vfloat16m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m8_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m1_t test_vfredosum_vs_f16m8_f16m1(vfloat16m8_t vector, vfloat16m1_t sca } // CHECK-RV64-LABEL: define 
dso_local @test_vfredosum_vs_f32m1_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m1_t test_vfredosum_vs_f32m1_f32m1(vfloat32m1_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat32m1_t test_vfredosum_vs_f32m2_f32m1(vfloat32m2_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat32m1_t test_vfredosum_vs_f32m4_f32m1(vfloat32m4_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat32m1_t test_vfredosum_vs_f32m8_f32m1(vfloat32m8_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m1_t test_vfredosum_vs_f64m1_f64m1(vfloat64m1_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat64m1_t test_vfredosum_vs_f64m2_f64m1(vfloat64m2_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat64m1_t test_vfredosum_vs_f64m4_f64m1(vfloat64m4_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat64m1_t test_vfredosum_vs_f64m8_f64m1(vfloat64m8_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m1_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv4f16.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat16m1_t test_vfredosum_vs_f16m1_f16m1_m(vbool16_t mask, vfloat16m1_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m2_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat16m1_t test_vfredosum_vs_f16m2_f16m1_m(vbool8_t mask, vfloat16m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m4_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat16m1_t test_vfredosum_vs_f16m4_f16m1_m(vbool4_t mask, vfloat16m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m8_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vfloat16m1_t test_vfredosum_vs_f16m8_f16m1_m(vbool2_t mask, vfloat16m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], 
[[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vfloat32m1_t test_vfredosum_vs_f32m1_f32m1_m(vbool32_t mask, vfloat32m1_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vfloat32m1_t test_vfredosum_vs_f32m2_f32m1_m(vbool16_t mask, vfloat32m2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vfloat32m1_t test_vfredosum_vs_f32m4_f32m1_m(vbool8_t mask, vfloat32m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vfloat32m1_t test_vfredosum_vs_f32m8_f32m1_m(vbool4_t mask, vfloat32m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vfloat64m1_t test_vfredosum_vs_f64m1_f64m1_m(vbool64_t mask, vfloat64m1_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vfloat64m1_t test_vfredosum_vs_f64m2_f64m1_m(vbool32_t mask, vfloat64m2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vfloat64m1_t test_vfredosum_vs_f64m4_f64m1_m(vbool16_t mask, vfloat64m4_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredsum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredsum.c index 0e712031abf260..01618e1bfaf51c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredsum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfredsum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_vfredsum_vs_f16m1_f16m1(vfloat16m1_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m2_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m1_t test_vfredsum_vs_f16m2_f16m1(vfloat16m2_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m4_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m1_t test_vfredsum_vs_f16m4_f16m1(vfloat16m4_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m8_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m1_t test_vfredsum_vs_f16m8_f16m1(vfloat16m8_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m1_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], 
[[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m1_t test_vfredsum_vs_f32m1_f32m1(vfloat32m1_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat32m1_t test_vfredsum_vs_f32m2_f32m1(vfloat32m2_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat32m1_t test_vfredsum_vs_f32m4_f32m1(vfloat32m4_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat32m1_t test_vfredsum_vs_f32m8_f32m1(vfloat32m8_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m1_t test_vfredsum_vs_f64m1_f64m1(vfloat64m1_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat64m1_t test_vfredsum_vs_f64m2_f64m1(vfloat64m2_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.th.vfredsum.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat64m1_t test_vfredsum_vs_f64m4_f64m1(vfloat64m4_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat64m1_t test_vfredsum_vs_f64m8_f64m1(vfloat64m8_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m1_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv4f16.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat16m1_t test_vfredsum_vs_f16m1_f16m1_m(vbool16_t mask, vfloat16m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m2_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat16m1_t test_vfredsum_vs_f16m2_f16m1_m(vbool8_t mask, vfloat16m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m4_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat16m1_t test_vfredsum_vs_f16m4_f16m1_m(vbool4_t mask, vfloat16m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m8_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vfloat16m1_t test_vfredsum_vs_f16m8_f16m1_m(vbool2_t mask, vfloat16m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] 
= call @llvm.riscv.th.vfredsum.mask.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vfloat32m1_t test_vfredsum_vs_f32m1_f32m1_m(vbool32_t mask, vfloat32m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vfloat32m1_t test_vfredsum_vs_f32m2_f32m1_m(vbool16_t mask, vfloat32m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vfloat32m1_t test_vfredsum_vs_f32m4_f32m1_m(vbool8_t mask, vfloat32m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vfloat32m1_t test_vfredsum_vs_f32m8_f32m1_m(vbool4_t mask, vfloat32m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vfloat64m1_t test_vfredsum_vs_f64m1_f64m1_m(vbool64_t mask, vfloat64m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vfloat64m1_t test_vfredsum_vs_f64m2_f64m1_m(vbool32_t mask, vfloat64m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] 
{ // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vfloat64m1_t test_vfredsum_vs_f64m4_f64m1_m(vbool16_t mask, vfloat64m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfwredosum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfwredosum.c index 5033ee93ee098b..c0d2cdd7eb93db 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfwredosum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfwredosum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m1_f32m1(vfloat16m1_t vector, vfloat32m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv2f32.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m2_f32m1(vfloat16m2_t vector, vfloat32m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv2f32.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m4_f32m1(vfloat16m4_t vector, vfloat32m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv2f32.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m8_f32m1(vfloat16m8_t vector, vfloat32m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv1f64.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m1_f64m1(vfloat32m1_t vector, vfloat64m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv1f64.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m2_f64m1(vfloat32m2_t vector, vfloat64m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv1f64.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m4_f64m1(vfloat32m4_t vector, vfloat64m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv1f64.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m8_f64m1(vfloat32m8_t vector, vfloat64m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv2f32.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m1_f32m1_m(vbool16_t mask, vfloat16m1_t vecto } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv2f32.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m2_f32m1_m(vbool8_t mask, vfloat16m2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv2f32.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m4_f32m1_m(vbool4_t mask, vfloat16m4_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv2f32.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m8_f32m1_m(vbool2_t mask, vfloat16m8_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv1f64.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m1_f64m1_m(vbool32_t mask, vfloat32m1_t vecto } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv1f64.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m2_f64m1_m(vbool16_t mask, vfloat32m2_t vecto } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv1f64.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m4_f64m1_m(vbool8_t mask, vfloat32m4_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv1f64.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfwredsum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfwredsum.c index 8a84528034c878..96d6a9ccbff564 100644 --- 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfwredsum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vfwredsum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m1_f32m1(vfloat16m1_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv2f32.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m2_f32m1(vfloat16m2_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv2f32.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m4_f32m1(vfloat16m4_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv2f32.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m8_f32m1(vfloat16m8_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv1f64.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m1_f64m1(vfloat32m1_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv1f64.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m2_f64m1(vfloat32m2_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv1f64.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m4_f64m1(vfloat32m4_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv1f64.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m8_f64m1(vfloat32m8_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv2f32.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m1_f32m1_m(vbool16_t mask, vfloat16m1_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv2f32.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m2_f32m1_m(vbool8_t mask, vfloat16m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv2f32.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m4_f32m1_m(vbool4_t mask, vfloat16m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv2f32.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m8_f32m1_m(vbool2_t mask, vfloat16m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv1f64.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m1_f64m1_m(vbool32_t mask, vfloat32m1_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv1f64.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m2_f64m1_m(vbool16_t mask, vfloat32m2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv1f64.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m4_f64m1_m(vbool8_t mask, vfloat32m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv1f64.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredand.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredand.c index c17683d8ab5fb1..f5f75ee6e2f866 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredand.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredand.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredand_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredand_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local 
@test_vredand_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredand_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredand_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredand_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredand_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredand_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredand_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredand_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredand_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredand_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredand_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredand_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredand_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredand_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, siz } // 
CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredand_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m1_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredand_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredand_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredand_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredand_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredand_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredand_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredand_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredand_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredand_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredand_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredand_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredand_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t 
scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredand_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredand_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredand_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m1_t test_vredand_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vredand_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vredand_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m1_t test_vredand_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m1_t test_vredand_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint16m1_t test_vredand_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint16m1_t test_vredand_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint16m1_t test_vredand_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint16m1_t test_vredand_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint32m1_t test_vredand_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint32m1_t test_vredand_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint32m1_t test_vredand_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint32m1_t test_vredand_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint64m1_t test_vredand_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint64m1_t test_vredand_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint64m1_t test_vredand_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint64m1_t test_vredand_vs_i64m8_i64m1_m(vbool8_t mask, vint64m8_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint8m1_t test_vredand_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint8m1_t test_vredand_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint8m1_t test_vredand_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint8m1_t test_vredand_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint16m1_t test_vredand_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint16m1_t test_vredand_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint16m1_t test_vredand_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint16m1_t test_vredand_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint32m1_t test_vredand_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint32m1_t test_vredand_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], 
[[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint32m1_t test_vredand_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint32m1_t test_vredand_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m1_t test_vredand_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m1_t test_vredand_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m1_t test_vredand_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmax.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmax.c index 
1db51028d73059..079a5a4b56b2a4 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmax.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmax.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredmax_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredmax_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredmax_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredmax_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredmax_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredmax_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], 
[[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredmax_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredmax_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredmax_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredmax_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredmax_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredmax_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ 
vint64m1_t test_vredmax_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredmax_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredmax_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredmax_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint8m1_t test_vredmax_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint8m1_t test_vredmax_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint8m1_t test_vredmax_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local 
@test_vredmax_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint8m1_t test_vredmax_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint16m1_t test_vredmax_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint16m1_t test_vredmax_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint16m1_t test_vredmax_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint16m1_t test_vredmax_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m1_t test_vredmax_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint } // CHECK-RV64-LABEL: define 
dso_local @test_vredmax_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m1_t test_vredmax_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint32m1_t test_vredmax_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint32m1_t test_vredmax_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m1_t test_vredmax_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m1_t test_vredmax_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m1_t test_vredmax_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint } // 
CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmaxu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmaxu.c index 549f72d449a35e..8574a5b7842d34 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmaxu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmaxu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vredmaxu_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vredmaxu_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m1_t test_vredmaxu_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m1_t test_vredmaxu_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_vredmaxu_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local 
@test_vredmaxu_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m1_t test_vredmaxu_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m1_t test_vredmaxu_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m1_t test_vredmaxu_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_vredmaxu_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m1_t test_vredmaxu_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m1_t test_vredmaxu_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m1_t test_vredmaxu_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_vredmaxu_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m1_t test_vredmaxu_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m1_t test_vredmaxu_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m1_t test_vredmaxu_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredmaxu_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-186,7 +187,7 @@ vuint8m1_t test_vredmaxu_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredmaxu_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredmaxu_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredmaxu_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredmaxu_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredmaxu_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredmaxu_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredmaxu_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredmaxu_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredmaxu_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredmaxu_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredmaxu_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], 
[[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredmaxu_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredmaxu_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmin.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmin.c index c05b6471c0f88f..219a90d2f1d91b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmin.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredmin.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredmin_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredmin_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredmin_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredmin_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredmin_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredmin_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredmin_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredmin_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredmin_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredmin_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredmin_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredmin_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredmin_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredmin_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredmin_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredmin_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vredmin.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint8m1_t test_vredmin_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint8m1_t test_vredmin_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint8m1_t test_vredmin_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint8m1_t test_vredmin_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint16m1_t test_vredmin_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint16m1_t test_vredmin_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vredmin.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint16m1_t test_vredmin_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint16m1_t test_vredmin_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m1_t test_vredmin_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m1_t test_vredmin_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint32m1_t test_vredmin_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint32m1_t test_vredmin_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m1_t test_vredmin_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m1_t test_vredmin_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m1_t test_vredmin_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredminu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredminu.c index 5d14af557d2ab0..727a78b9d873b0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredminu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredminu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vredminu_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vredminu_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m1_t test_vredminu_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m1_t test_vredminu_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_vredminu_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m1_t test_vredminu_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m1_t test_vredminu_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m1_t test_vredminu_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t 
test_vredminu_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m1_t test_vredminu_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m1_t test_vredminu_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m1_t test_vredminu_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_vredminu_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m1_t test_vredminu_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m1_t test_vredminu_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m1_t test_vredminu_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredminu_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredminu_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredminu_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredminu_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredminu_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredminu_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredminu_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredminu_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredminu_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredminu_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredminu_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredminu_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredminu_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredminu_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredminu_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredor.c index e31b6b46891fc4..167f0ad0d7d651 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredor_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t vl } // CHECK-RV64-LABEL: define dso_local 
@test_vredor_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredor_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredor_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredor_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredor_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredor_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredor_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vredor.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredor_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredor_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredor_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredor_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredor_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredor_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredor_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local 
@test_vredor_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredor_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredor_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m1_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredor_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredor_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredor_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredor_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vredor.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredor_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredor_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredor_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredor_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredor_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredor_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredor_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local 
@test_vredor_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredor_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredor_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredor_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredor_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m1_t test_vredor_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vredor_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vredor_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m1_t test_vredor_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m1_t test_vredor_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint16m1_t test_vredor_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint16m1_t test_vredor_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint16m1_t test_vredor_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint16m1_t test_vredor_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint32m1_t test_vredor_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint32m1_t test_vredor_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint32m1_t test_vredor_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint32 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint32m1_t test_vredor_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint32 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint64m1_t test_vredor_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint64m1_t test_vredor_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint64m1_t test_vredor_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint64m1_t test_vredor_vs_i64m8_i64m1_m(vbool8_t mask, vint64m8_t vector, vint64 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint8m1_t test_vredor_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8m1 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint8m1_t test_vredor_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8m1 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint8m1_t test_vredor_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8m1 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
// CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint8m1_t test_vredor_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8m1 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint16m1_t test_vredor_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint16m1_t test_vredor_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vuin } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint16m1_t test_vredor_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vuin } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint16m1_t test_vredor_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vuin } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint32m1_t test_vredor_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
// CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint32m1_t test_vredor_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint32m1_t test_vredor_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vuin } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint32m1_t test_vredor_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vuin } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m1_t test_vredor_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m1_t test_vredor_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m1_t test_vredor_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
// CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredsum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredsum.c index c59f6c4fd2723a..ba14354a11ccec 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredsum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredsum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredsum_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredsum_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredsum_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredsum_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredsum_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vredsum.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredsum_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredsum_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredsum_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredsum_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredsum_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredsum_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredsum_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local 
@test_vredsum_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredsum_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredsum_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredsum_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredsum_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m1_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredsum_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredsum_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredsum_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredsum_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredsum_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredsum_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredsum_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredsum_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredsum_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: 
define dso_local @test_vredsum_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredsum_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredsum_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredsum_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredsum_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredsum_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredsum_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m1_t test_vredsum_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vredsum_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vredsum_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m1_t test_vredsum_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m1_t test_vredsum_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint16m1_t test_vredsum_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint16m1_t test_vredsum_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint16m1_t test_vredsum_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint16m1_t test_vredsum_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint32m1_t test_vredsum_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint32m1_t test_vredsum_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint32m1_t test_vredsum_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint32m1_t test_vredsum_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint64m1_t test_vredsum_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint64m1_t test_vredsum_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint64m1_t test_vredsum_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint64m1_t test_vredsum_vs_i64m8_i64m1_m(vbool8_t mask, vint64m8_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint8m1_t test_vredsum_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint8m1_t test_vredsum_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint8m1_t test_vredsum_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint8m1_t test_vredsum_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint16m1_t test_vredsum_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint16m1_t test_vredsum_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint16m1_t test_vredsum_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint16m1_t test_vredsum_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint32m1_t test_vredsum_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint32m1_t test_vredsum_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint32m1_t test_vredsum_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint32m1_t test_vredsum_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m1_t test_vredsum_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m1_t test_vredsum_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m1_t test_vredsum_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredxor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredxor.c index fe41b880fb873f..fa04ae96e57118 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredxor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vredxor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredxor_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredxor_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredxor_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredxor_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredxor_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredxor_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredxor_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredxor_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredxor_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredxor_vs_i32m2_i32m1(vint32m2_t 
vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredxor_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredxor_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredxor_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredxor_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredxor_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredxor_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m1_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredxor_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredxor_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredxor_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredxor_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredxor_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredxor_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t 
test_vredxor_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredxor_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredxor_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredxor_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredxor_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredxor_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredxor_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredxor_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredxor_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m1_t test_vredxor_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vredxor_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vredxor_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m1_t test_vredxor_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m1_t test_vredxor_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint16m1_t test_vredxor_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint16m1_t test_vredxor_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint16m1_t test_vredxor_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint16m1_t test_vredxor_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint32m1_t test_vredxor_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint32m1_t test_vredxor_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint32m1_t test_vredxor_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint32m1_t test_vredxor_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint64m1_t test_vredxor_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint64m1_t test_vredxor_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint64m1_t test_vredxor_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint64m1_t test_vredxor_vs_i64m8_i64m1_m(vbool8_t mask, vint64m8_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint8m1_t test_vredxor_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint8m1_t test_vredxor_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint8m1_t test_vredxor_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint8m1_t test_vredxor_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint16m1_t test_vredxor_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint16m1_t test_vredxor_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint16m1_t test_vredxor_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint16m1_t test_vredxor_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint32m1_t test_vredxor_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint32m1_t test_vredxor_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint32m1_t test_vredxor_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint32m1_t test_vredxor_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m1_t test_vredxor_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m1_t test_vredxor_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m1_t test_vredxor_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vwredsum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vwredsum.c index 02c7137726ede7..19ab55ba3eca72 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vwredsum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vwredsum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint16m1_t test_vwredsum_vs_i8m1_i16m1(vint8m1_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv4i16.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint16m1_t test_vwredsum_vs_i8m2_i16m1(vint8m2_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv4i16.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint16m1_t test_vwredsum_vs_i8m4_i16m1(vint8m4_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv4i16.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint16m1_t test_vwredsum_vs_i8m8_i16m1(vint8m8_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv2i32.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint32m1_t test_vwredsum_vs_i16m1_i32m1(vint16m1_t vector, vint32m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv2i32.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint32m1_t test_vwredsum_vs_i16m2_i32m1(vint16m2_t vector, vint32m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv2i32.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint32m1_t test_vwredsum_vs_i16m4_i32m1(vint16m4_t vector, vint32m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv2i32.nxv32i16.i64( poison, [[VECTOR]], 
[[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint32m1_t test_vwredsum_vs_i16m8_i32m1(vint16m8_t vector, vint32m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv1i64.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint64m1_t test_vwredsum_vs_i32m1_i64m1(vint32m1_t vector, vint64m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv1i64.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint64m1_t test_vwredsum_vs_i32m2_i64m1(vint32m2_t vector, vint64m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv1i64.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint64m1_t test_vwredsum_vs_i32m4_i64m1(vint32m4_t vector, vint64m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv1i64.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint64m1_t test_vwredsum_vs_i32m8_i64m1(vint32m8_t vector, vint64m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv4i16.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m1_t test_vwredsum_vs_i8m1_i16m1_m(vbool8_t mask, vint8m1_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv4i16.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m1_t test_vwredsum_vs_i8m2_i16m1_m(vbool4_t mask, vint8m2_t vector, vint16 } // 
CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv4i16.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m1_t test_vwredsum_vs_i8m4_i16m1_m(vbool2_t mask, vint8m4_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv4i16.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m1_t test_vwredsum_vs_i8m8_i16m1_m(vbool1_t mask, vint8m8_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv2i32.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vwredsum_vs_i16m1_i32m1_m(vbool16_t mask, vint16m1_t vector, vin } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv2i32.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vwredsum_vs_i16m2_i32m1_m(vbool8_t mask, vint16m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv2i32.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m1_t test_vwredsum_vs_i16m4_i32m1_m(vbool4_t mask, vint16m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv2i32.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m1_t test_vwredsum_vs_i16m8_i32m1_m(vbool2_t mask, 
vint16m8_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv1i64.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint64m1_t test_vwredsum_vs_i32m1_i64m1_m(vbool32_t mask, vint32m1_t vector, vin } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv1i64.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint64m1_t test_vwredsum_vs_i32m2_i64m1_m(vbool16_t mask, vint32m2_t vector, vin } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv1i64.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint64m1_t test_vwredsum_vs_i32m4_i64m1_m(vbool8_t mask, vint32m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv1i64.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vwredsumu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vwredsumu.c index 3e094d8d4181eb..f8c3990d23b016 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vwredsumu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/thead/vwredsumu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf8_u16m1(vuint8mf8_t vector, vuint16m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8mf4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv2i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf4_u16m1(vuint8mf4_t vector, vuint16m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8mf2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv4i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf2_u16m1(vuint8mf2_t vector, vuint16m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint16m1_t test_vwredsumu_vs_u8m1_u16m1(vuint8m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_vwredsumu_vs_u8m2_u16m1(vuint8m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m1_t test_vwredsumu_vs_u8m4_u16m1(vuint8m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m1_t test_vwredsumu_vs_u8m8_u16m1(vuint8m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16mf4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv1i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint32m1_t test_vwredsumu_vs_u16mf4_u32m1(vuint16mf4_t vector, vuint32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16mf2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv2i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_vwredsumu_vs_u16mf2_u32m1(vuint16mf2_t vector, vuint32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m1_t test_vwredsumu_vs_u16m1_u32m1(vuint16m1_t vector, vuint32m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m1_t test_vwredsumu_vs_u16m2_u32m1(vuint16m2_t vector, vuint32m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m1_t test_vwredsumu_vs_u16m4_u32m1(vuint16m4_t vector, vuint32m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint32m1_t test_vwredsumu_vs_u16m8_u32m1(vuint16m8_t vector, vuint32m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32mf2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv1i64.nxv1i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m1_t test_vwredsumu_vs_u32mf2_u64m1(vuint32mf2_t vector, vuint64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vwredsumu.nxv1i64.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m1_t test_vwredsumu_vs_u32m1_u64m1(vuint32m1_t vector, vuint64m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv1i64.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m1_t test_vwredsumu_vs_u32m2_u64m1(vuint32m2_t vector, vuint64m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv1i64.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint64m1_t test_vwredsumu_vs_u32m4_u64m1(vuint32m4_t vector, vuint64m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv1i64.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint64m1_t test_vwredsumu_vs_u32m8_u64m1(vuint32m8_t vector, vuint64m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8mf8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv1i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf8_u16m1_m(vbool64_t mask, vuint8mf8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8mf4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv2i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf4_u16m1_m(vbool32_t mask, vuint8mf4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8mf2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv4i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf2_u16m1_m(vbool16_t mask, vuint8mf2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vwredsumu_vs_u8m1_u16m1_m(vbool8_t mask, vuint8m1_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vwredsumu_vs_u8m2_u16m1_m(vbool4_t mask, vuint8m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vwredsumu_vs_u8m4_u16m1_m(vbool2_t mask, vuint8m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint16m1_t test_vwredsumu_vs_u8m8_u16m1_m(vbool1_t mask, vuint8m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16mf4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv1i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vwredsumu_vs_u16mf4_u32m1_m(vbool64_t mask, vuint16mf4_t vector } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16mf2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv2i16.i64( poison, [[VECTOR]], 
[[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vwredsumu_vs_u16mf2_u32m1_m(vbool32_t mask, vuint16mf2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vwredsumu_vs_u16m1_u32m1_m(vbool16_t mask, vuint16m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint32m1_t test_vwredsumu_vs_u16m2_u32m1_m(vbool8_t mask, vuint16m2_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint32m1_t test_vwredsumu_vs_u16m4_u32m1_m(vbool4_t mask, vuint16m4_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint32m1_t test_vwredsumu_vs_u16m8_u32m1_m(vbool2_t mask, vuint16m8_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32mf2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv1i64.nxv1i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m1_t test_vwredsumu_vs_u32mf2_u64m1_m(vbool64_t mask, vuint32mf2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vwredsumu.mask.nxv1i64.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint64m1_t test_vwredsumu_vs_u32m1_u64m1_m(vbool32_t mask, vuint32m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv1i64.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint64m1_t test_vwredsumu_vs_u32m2_u64m1_m(vbool16_t mask, vuint32m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv1i64.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint64m1_t test_vwredsumu_vs_u32m4_u64m1_m(vbool8_t mask, vuint32m4_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv1i64.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredmax.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredmax.c index 3be8fde8b24265..6abd88d24c7106 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredmax.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredmax.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_vfredmax_vs_f16m1_f16m1(vfloat16m1_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m2_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m1_t test_vfredmax_vs_f16m2_f16m1(vfloat16m2_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m4_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m1_t test_vfredmax_vs_f16m4_f16m1(vfloat16m4_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m8_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m1_t test_vfredmax_vs_f16m8_f16m1(vfloat16m8_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m1_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m1_t test_vfredmax_vs_f32m1_f32m1(vfloat32m1_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat32m1_t test_vfredmax_vs_f32m2_f32m1(vfloat32m2_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat32m1_t test_vfredmax_vs_f32m4_f32m1(vfloat32m4_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat32m1_t test_vfredmax_vs_f32m8_f32m1(vfloat32m8_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m1_t 
test_vfredmax_vs_f64m1_f64m1(vfloat64m1_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat64m1_t test_vfredmax_vs_f64m2_f64m1(vfloat64m2_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat64m1_t test_vfredmax_vs_f64m4_f64m1(vfloat64m4_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat64m1_t test_vfredmax_vs_f64m8_f64m1(vfloat64m8_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m1_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv4f16.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat16m1_t test_vfredmax_vs_f16m1_f16m1_m(vbool16_t mask, vfloat16m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m2_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat16m1_t test_vfredmax_vs_f16m2_f16m1_m(vbool8_t mask, vfloat16m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f16m4_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat16m1_t test_vfredmax_vs_f16m4_f16m1_m(vbool4_t mask, vfloat16m4_t vector, } // CHECK-RV64-LABEL: define dso_local 
@test_vfredmax_vs_f16m8_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vfloat16m1_t test_vfredmax_vs_f16m8_f16m1_m(vbool2_t mask, vfloat16m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vfloat32m1_t test_vfredmax_vs_f32m1_f32m1_m(vbool32_t mask, vfloat32m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vfloat32m1_t test_vfredmax_vs_f32m2_f32m1_m(vbool16_t mask, vfloat32m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vfloat32m1_t test_vfredmax_vs_f32m4_f32m1_m(vbool8_t mask, vfloat32m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f32m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vfloat32m1_t test_vfredmax_vs_f32m8_f32m1_m(vbool4_t mask, vfloat32m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vfloat64m1_t test_vfredmax_vs_f64m1_f64m1_m(vbool64_t mask, vfloat64m1_t vector, } // 
CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vfloat64m1_t test_vfredmax_vs_f64m2_f64m1_m(vbool32_t mask, vfloat64m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vfloat64m1_t test_vfredmax_vs_f64m4_f64m1_m(vbool16_t mask, vfloat64m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmax_vs_f64m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmax.mask.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredmin.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredmin.c index 26001a2ba2e77b..2b737e5ad23847 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredmin.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredmin.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_vfredmin_vs_f16m1_f16m1(vfloat16m1_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m2_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m1_t test_vfredmin_vs_f16m2_f16m1(vfloat16m2_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m4_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m1_t 
test_vfredmin_vs_f16m4_f16m1(vfloat16m4_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m8_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m1_t test_vfredmin_vs_f16m8_f16m1(vfloat16m8_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m1_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m1_t test_vfredmin_vs_f32m1_f32m1(vfloat32m1_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat32m1_t test_vfredmin_vs_f32m2_f32m1(vfloat32m2_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat32m1_t test_vfredmin_vs_f32m4_f32m1(vfloat32m4_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat32m1_t test_vfredmin_vs_f32m8_f32m1(vfloat32m8_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m1_t test_vfredmin_vs_f64m1_f64m1(vfloat64m1_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: 
( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat64m1_t test_vfredmin_vs_f64m2_f64m1(vfloat64m2_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat64m1_t test_vfredmin_vs_f64m4_f64m1(vfloat64m4_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat64m1_t test_vfredmin_vs_f64m8_f64m1(vfloat64m8_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m1_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv4f16.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat16m1_t test_vfredmin_vs_f16m1_f16m1_m(vbool16_t mask, vfloat16m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m2_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat16m1_t test_vfredmin_vs_f16m2_f16m1_m(vbool8_t mask, vfloat16m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m4_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat16m1_t test_vfredmin_vs_f16m4_f16m1_m(vbool4_t mask, vfloat16m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f16m8_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vfloat16m1_t test_vfredmin_vs_f16m8_f16m1_m(vbool2_t mask, vfloat16m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vfloat32m1_t test_vfredmin_vs_f32m1_f32m1_m(vbool32_t mask, vfloat32m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vfloat32m1_t test_vfredmin_vs_f32m2_f32m1_m(vbool16_t mask, vfloat32m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vfloat32m1_t test_vfredmin_vs_f32m4_f32m1_m(vbool8_t mask, vfloat32m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f32m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vfloat32m1_t test_vfredmin_vs_f32m8_f32m1_m(vbool4_t mask, vfloat32m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vfloat64m1_t test_vfredmin_vs_f64m1_f64m1_m(vbool64_t mask, vfloat64m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vfloat64m1_t test_vfredmin_vs_f64m2_f64m1_m(vbool32_t mask, vfloat64m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vfloat64m1_t test_vfredmin_vs_f64m4_f64m1_m(vbool16_t mask, vfloat64m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredmin_vs_f64m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredmin.mask.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredosum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredosum.c index 38f91cc5b18370..c68aad592dd917 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredosum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredosum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_vfredosum_vs_f16m1_f16m1(vfloat16m1_t vector, vfloat16m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m2_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m1_t test_vfredosum_vs_f16m2_f16m1(vfloat16m2_t vector, vfloat16m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m4_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m1_t test_vfredosum_vs_f16m4_f16m1(vfloat16m4_t vector, vfloat16m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m8_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m1_t test_vfredosum_vs_f16m8_f16m1(vfloat16m8_t vector, vfloat16m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m1_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m1_t test_vfredosum_vs_f32m1_f32m1(vfloat32m1_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat32m1_t test_vfredosum_vs_f32m2_f32m1(vfloat32m2_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat32m1_t test_vfredosum_vs_f32m4_f32m1(vfloat32m4_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat32m1_t test_vfredosum_vs_f32m8_f32m1(vfloat32m8_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m1_t test_vfredosum_vs_f64m1_f64m1(vfloat64m1_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv1f64.nxv2f64.i64( 
poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat64m1_t test_vfredosum_vs_f64m2_f64m1(vfloat64m2_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat64m1_t test_vfredosum_vs_f64m4_f64m1(vfloat64m4_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat64m1_t test_vfredosum_vs_f64m8_f64m1(vfloat64m8_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m1_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv4f16.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat16m1_t test_vfredosum_vs_f16m1_f16m1_m(vbool16_t mask, vfloat16m1_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m2_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat16m1_t test_vfredosum_vs_f16m2_f16m1_m(vbool8_t mask, vfloat16m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m4_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat16m1_t test_vfredosum_vs_f16m4_f16m1_m(vbool4_t mask, vfloat16m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f16m8_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], 
[[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vfloat16m1_t test_vfredosum_vs_f16m8_f16m1_m(vbool2_t mask, vfloat16m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vfloat32m1_t test_vfredosum_vs_f32m1_f32m1_m(vbool32_t mask, vfloat32m1_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vfloat32m1_t test_vfredosum_vs_f32m2_f32m1_m(vbool16_t mask, vfloat32m2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vfloat32m1_t test_vfredosum_vs_f32m4_f32m1_m(vbool8_t mask, vfloat32m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f32m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vfloat32m1_t test_vfredosum_vs_f32m8_f32m1_m(vbool4_t mask, vfloat32m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vfloat64m1_t test_vfredosum_vs_f64m1_f64m1_m(vbool64_t mask, vfloat64m1_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.th.vfredosum.mask.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vfloat64m1_t test_vfredosum_vs_f64m2_f64m1_m(vbool32_t mask, vfloat64m2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vfloat64m1_t test_vfredosum_vs_f64m4_f64m1_m(vbool16_t mask, vfloat64m4_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfredosum_vs_f64m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredosum.mask.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredsum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredsum.c index 9933d9d7ef5edd..1f957904255669 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredsum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfredsum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1_t test_vfredsum_vs_f16m1_f16m1(vfloat16m1_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m2_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m1_t test_vfredsum_vs_f16m2_f16m1(vfloat16m2_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m4_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m1_t test_vfredsum_vs_f16m4_f16m1(vfloat16m4_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m8_f16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat16m1_t test_vfredsum_vs_f16m8_f16m1(vfloat16m8_t vector, vfloat16m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m1_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat32m1_t test_vfredsum_vs_f32m1_f32m1(vfloat32m1_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat32m1_t test_vfredsum_vs_f32m2_f32m1(vfloat32m2_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat32m1_t test_vfredsum_vs_f32m4_f32m1(vfloat32m4_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat32m1_t test_vfredsum_vs_f32m8_f32m1(vfloat32m8_t vector, vfloat32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat64m1_t test_vfredsum_vs_f64m1_f64m1(vfloat64m1_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-106,7 +107,7 @@ vfloat64m1_t test_vfredsum_vs_f64m2_f64m1(vfloat64m2_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat64m1_t test_vfredsum_vs_f64m4_f64m1(vfloat64m4_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat64m1_t test_vfredsum_vs_f64m8_f64m1(vfloat64m8_t vector, vfloat64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m1_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv4f16.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat16m1_t test_vfredsum_vs_f16m1_f16m1_m(vbool16_t mask, vfloat16m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m2_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv4f16.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat16m1_t test_vfredsum_vs_f16m2_f16m1_m(vbool8_t mask, vfloat16m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m4_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv4f16.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat16m1_t test_vfredsum_vs_f16m4_f16m1_m(vbool4_t mask, vfloat16m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f16m8_f16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv4f16.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ 
vfloat16m1_t test_vfredsum_vs_f16m8_f16m1_m(vbool2_t mask, vfloat16m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv2f32.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vfloat32m1_t test_vfredsum_vs_f32m1_f32m1_m(vbool32_t mask, vfloat32m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv2f32.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vfloat32m1_t test_vfredsum_vs_f32m2_f32m1_m(vbool16_t mask, vfloat32m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv2f32.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vfloat32m1_t test_vfredsum_vs_f32m4_f32m1_m(vbool8_t mask, vfloat32m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f32m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv2f32.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vfloat32m1_t test_vfredsum_vs_f32m8_f32m1_m(vbool4_t mask, vfloat32m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv1f64.nxv1f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vfloat64m1_t test_vfredsum_vs_f64m1_f64m1_m(vbool64_t mask, vfloat64m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv1f64.nxv2f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vfloat64m1_t test_vfredsum_vs_f64m2_f64m1_m(vbool32_t mask, vfloat64m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv1f64.nxv4f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vfloat64m1_t test_vfredsum_vs_f64m4_f64m1_m(vbool16_t mask, vfloat64m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfredsum_vs_f64m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfredsum.mask.nxv1f64.nxv8f64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfwredosum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfwredosum.c index 809ec0e8e87843..4dcf67987a6317 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfwredosum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfwredosum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m1_f32m1(vfloat16m1_t vector, vfloat32m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv2f32.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m2_f32m1(vfloat16m2_t vector, vfloat32m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv2f32.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m4_f32m1(vfloat16m4_t vector, vfloat32m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vfwredosum.nxv2f32.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m8_f32m1(vfloat16m8_t vector, vfloat32m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv1f64.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m1_f64m1(vfloat32m1_t vector, vfloat64m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv1f64.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m2_f64m1(vfloat32m2_t vector, vfloat64m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv1f64.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m4_f64m1(vfloat32m4_t vector, vfloat64m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.nxv1f64.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m8_f64m1(vfloat32m8_t vector, vfloat64m1_t sc } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv2f32.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m1_f32m1_m(vbool16_t mask, vfloat16m1_t vecto } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv2f32.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m2_f32m1_m(vbool8_t mask, vfloat16m2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv2f32.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m4_f32m1_m(vbool4_t mask, vfloat16m4_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f16m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv2f32.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat32m1_t test_vfwredosum_vs_f16m8_f32m1_m(vbool2_t mask, vfloat16m8_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv1f64.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m1_f64m1_m(vbool32_t mask, vfloat32m1_t vecto } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv1f64.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m2_f64m1_m(vbool16_t mask, vfloat32m2_t vecto } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredosum.mask.nxv1f64.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat64m1_t test_vfwredosum_vs_f32m4_f64m1_m(vbool8_t mask, vfloat32m4_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredosum_vs_f32m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vfwredosum.mask.nxv1f64.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfwredsum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfwredsum.c index 38e2b9c2141e3c..569718415ff7cb 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfwredsum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vfwredsum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m1_f32m1(vfloat16m1_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m2_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv2f32.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m2_f32m1(vfloat16m2_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m4_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv2f32.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m4_f32m1(vfloat16m4_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m8_f32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv2f32.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m8_f32m1(vfloat16m8_t vector, vfloat32m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m1_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv1f64.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m1_f64m1(vfloat32m1_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m2_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vfwredsum.nxv1f64.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m2_f64m1(vfloat32m2_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m4_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv1f64.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m4_f64m1(vfloat32m4_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m8_f64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.nxv1f64.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m8_f64m1(vfloat32m8_t vector, vfloat64m1_t sca } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m1_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv2f32.nxv4f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m1_f32m1_m(vbool16_t mask, vfloat16m1_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m2_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv2f32.nxv8f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m2_f32m1_m(vbool8_t mask, vfloat16m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m4_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv2f32.nxv16f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m4_f32m1_m(vbool4_t mask, vfloat16m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f16m8_f32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vfwredsum.mask.nxv2f32.nxv32f16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vfloat32m1_t test_vfwredsum_vs_f16m8_f32m1_m(vbool2_t mask, vfloat16m8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m1_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv1f64.nxv2f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m1_f64m1_m(vbool32_t mask, vfloat32m1_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m2_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv1f64.nxv4f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m2_f64m1_m(vbool16_t mask, vfloat32m2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m4_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv1f64.nxv8f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vfloat64m1_t test_vfwredsum_vs_f32m4_f64m1_m(vbool8_t mask, vfloat32m4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vfwredsum_vs_f32m8_f64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vfwredsum.mask.nxv1f64.nxv16f32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 7, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredand.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredand.c index f7bb80ab7a8c0c..e47ecafe91ddea 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredand.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredand.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredand_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredand_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredand_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredand_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredand_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredand_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredand_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredand_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredand_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredand_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredand_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredand_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredand_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredand_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredand_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredand_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m1_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredand_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredand_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredand_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredand_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], 
[[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredand_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredand_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredand_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredand_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredand_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredand_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredand_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], 
[[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredand_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredand_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredand_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredand_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m1_t test_vredand_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vredand_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vredand_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m1_t test_vredand_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m1_t test_vredand_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint16m1_t test_vredand_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint16m1_t test_vredand_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint16m1_t test_vredand_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint16m1_t test_vredand_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint32m1_t test_vredand_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint32m1_t test_vredand_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint32m1_t test_vredand_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint32m1_t test_vredand_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint64m1_t test_vredand_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint64m1_t test_vredand_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint64m1_t test_vredand_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint64m1_t test_vredand_vs_i64m8_i64m1_m(vbool8_t mask, vint64m8_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint8m1_t test_vredand_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint8m1_t test_vredand_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint8m1_t test_vredand_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint8m1_t test_vredand_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint16m1_t test_vredand_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint16m1_t test_vredand_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint16m1_t test_vredand_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint16m1_t test_vredand_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint32m1_t test_vredand_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint32m1_t test_vredand_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint32m1_t test_vredand_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint32m1_t test_vredand_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m1_t test_vredand_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m1_t test_vredand_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m1_t test_vredand_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredand_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredand.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmax.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmax.c index 1ee510af5a5f10..7d606b2dca6b9c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmax.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmax.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredmax_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredmax_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredmax_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredmax_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredmax_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] 
= call @llvm.riscv.th.vredmax.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredmax_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredmax_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredmax_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredmax_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredmax_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredmax_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredmax_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define 
dso_local @test_vredmax_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredmax_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredmax_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredmax_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredmax_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint8m1_t test_vredmax_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint8m1_t test_vredmax_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint8m1_t test_vredmax_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint8m1_t test_vredmax_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint16m1_t test_vredmax_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint16m1_t test_vredmax_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint16m1_t test_vredmax_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint16m1_t test_vredmax_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m1_t test_vredmax_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m1_t test_vredmax_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint32m1_t test_vredmax_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint32m1_t test_vredmax_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m1_t test_vredmax_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m1_t test_vredmax_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m1_t test_vredmax_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmax_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmax.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmaxu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmaxu.c index 71725aab99590e..71266415819485 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmaxu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmaxu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vredmaxu_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vredmaxu_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m1_t test_vredmaxu_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m1_t test_vredmaxu_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_vredmaxu_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m1_t test_vredmaxu_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m1_t test_vredmaxu_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m1_t test_vredmaxu_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_vredmaxu_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m1_t test_vredmaxu_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], 
i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m1_t test_vredmaxu_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m1_t test_vredmaxu_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_vredmaxu_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m1_t test_vredmaxu_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m1_t test_vredmaxu_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m1_t test_vredmaxu_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredmaxu_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local 
@test_vredmaxu_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredmaxu_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredmaxu_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredmaxu_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredmaxu_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredmaxu_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredmaxu_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vu } // CHECK-RV64-LABEL: 
define dso_local @test_vredmaxu_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredmaxu_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredmaxu_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredmaxu_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredmaxu_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredmaxu_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredmaxu_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, v 
} // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredmaxu_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredmaxu_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredmaxu_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmaxu.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmin.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmin.c index 08aef3f0b9597d..1f96c3c5a2aee1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmin.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredmin.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredmin_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredmin_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t 
test_vredmin_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredmin_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredmin_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredmin_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredmin_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredmin_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredmin_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredmin_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredmin_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredmin_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredmin_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredmin_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredmin_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-166,7 +167,7 @@ vint64m1_t test_vredmin_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint8m1_t test_vredmin_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint8m1_t test_vredmin_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint8m1_t test_vredmin_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint8m1_t test_vredmin_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint16m1_t test_vredmin_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-226,7 +227,7 @@ vint16m1_t test_vredmin_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint16m1_t test_vredmin_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint16m1_t test_vredmin_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m1_t test_vredmin_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m1_t test_vredmin_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint32m1_t test_vredmin_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -286,7 +287,7 @@ vint32m1_t test_vredmin_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m1_t test_vredmin_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m1_t test_vredmin_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m1_t test_vredmin_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredmin_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredmin.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredminu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredminu.c index 2197567c9ff6fc..bcdb6ed8df7d27 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredminu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredminu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vredminu_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vredminu.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vredminu_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m1_t test_vredminu_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m1_t test_vredminu_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_vredminu_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m1_t test_vredminu_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m1_t test_vredminu_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m1_t test_vredminu_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local 
@test_vredminu_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_vredminu_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m1_t test_vredminu_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m1_t test_vredminu_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m1_t test_vredminu_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m1_t test_vredminu_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m1_t test_vredminu_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m1_t test_vredminu_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m1_t test_vredminu_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredminu_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredminu_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredminu_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredminu_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8 } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vredminu.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredminu_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredminu_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredminu_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredminu_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredminu_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredminu_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredminu_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredminu_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredminu_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredminu_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredminu_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vredminu_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredminu.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredor.c index 2f33290cd2bb03..6e069dcb3405b4 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredor.c +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredor_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredor_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredor_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredor_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredor_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredor_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vredor.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredor_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredor_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredor_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredor_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredor_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredor_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredor_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local 
@test_vredor_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredor_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredor_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredor_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m1_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredor_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredor_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredor_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.th.vredor.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredor_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredor_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredor_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredor_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredor_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredor_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredor_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local 
@test_vredor_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredor_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredor_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredor_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredor_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredor_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m1_t test_vredor_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, s } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vredor_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vredor_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m1_t test_vredor_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m1_t test_vredor_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint16m1_t test_vredor_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint16m1_t test_vredor_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint16m1_t test_vredor_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint16m1_t test_vredor_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint32m1_t test_vredor_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint32m1_t test_vredor_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint32m1_t test_vredor_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint32 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint32m1_t test_vredor_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint32 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint64m1_t test_vredor_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint64m1_t test_vredor_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint64m1_t test_vredor_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint64m1_t test_vredor_vs_i64m8_i64m1_m(vbool8_t mask, vint64m8_t vector, vint64 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint8m1_t test_vredor_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8m1 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint8m1_t test_vredor_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8m1 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint8m1_t test_vredor_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8m1 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint8m1_t test_vredor_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8m1 } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint16m1_t test_vredor_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint16m1_t test_vredor_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vuin } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint16m1_t test_vredor_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vuin } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint16m1_t test_vredor_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vuin } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint32m1_t test_vredor_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint32m1_t test_vredor_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint32m1_t test_vredor_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vuin } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint32m1_t test_vredor_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vuin } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m1_t test_vredor_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m1_t test_vredor_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m1_t test_vredor_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredor_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredor.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredsum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredsum.c index fc7a5e7e7d8f87..5d1d120e1c858a 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredsum.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredsum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredsum_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredsum_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredsum_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredsum_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vredsum.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredsum_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredsum_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredsum_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredsum_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vredsum_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredsum_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredsum_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local 
@test_vredsum_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredsum_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredsum_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredsum_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredsum_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredsum_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m1_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredsum_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredsum_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredsum_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredsum_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredsum_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredsum_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredsum_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredsum_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, } // 
CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredsum_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredsum_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredsum_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredsum_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredsum_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredsum_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredsum_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m1_t test_vredsum_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vredsum_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vredsum_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m1_t test_vredsum_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m1_t test_vredsum_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vredsum.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint16m1_t test_vredsum_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint16m1_t test_vredsum_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint16m1_t test_vredsum_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint16m1_t test_vredsum_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint32m1_t test_vredsum_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint32m1_t test_vredsum_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint32m1_t test_vredsum_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint32m1_t test_vredsum_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint64m1_t test_vredsum_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint64m1_t test_vredsum_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint64m1_t test_vredsum_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint64m1_t test_vredsum_vs_i64m8_i64m1_m(vbool8_t mask, vint64m8_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint8m1_t test_vredsum_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint8m1_t test_vredsum_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint8m1_t test_vredsum_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint8m1_t test_vredsum_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint16m1_t test_vredsum_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint16m1_t test_vredsum_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint16m1_t test_vredsum_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint16m1_t test_vredsum_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint32m1_t test_vredsum_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint32m1_t test_vredsum_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint32m1_t test_vredsum_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint32m1_t test_vredsum_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m1_t test_vredsum_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m1_t test_vredsum_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m1_t test_vredsum_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredsum_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredsum.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredxor.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredxor.c index c6d747e860302f..50d04b461c05f0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredxor.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vredxor.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vredxor_vs_i8m1_i8m1(vint8m1_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m2_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vredxor_vs_i8m2_i8m1(vint8m2_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m4_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m1_t test_vredxor_vs_i8m4_i8m1(vint8m4_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m8_i8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m1_t test_vredxor_vs_i8m8_i8m1(vint8m8_t vector, vint8m1_t scalar, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m1_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vredxor_vs_i16m1_i16m1(vint16m1_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m1_t test_vredxor_vs_i16m2_i16m1(vint16m2_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m1_t test_vredxor_vs_i16m4_i16m1(vint16m4_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m1_t test_vredxor_vs_i16m8_i16m1(vint16m8_t vector, vint16m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ 
vint32m1_t test_vredxor_vs_i32m1_i32m1(vint32m1_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m1_t test_vredxor_vs_i32m2_i32m1(vint32m2_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m1_t test_vredxor_vs_i32m4_i32m1(vint32m4_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m1_t test_vredxor_vs_i32m8_i32m1(vint32m8_t vector, vint32m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vredxor_vs_i64m1_i64m1(vint64m1_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m1_t test_vredxor_vs_i64m2_i64m1(vint64m2_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m1_t test_vredxor_vs_i64m4_i64m1(vint64m4_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m1_t test_vredxor_vs_i64m8_i64m1(vint64m8_t vector, vint64m1_t scalar, siz } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m1_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vredxor_vs_u8m1_u8m1(vuint8m1_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m2_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m1_t test_vredxor_vs_u8m2_u8m1(vuint8m2_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m4_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m1_t test_vredxor_vs_u8m4_u8m1(vuint8m4_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m8_u8m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m1_t test_vredxor_vs_u8m8_u8m1(vuint8m8_t vector, vuint8m1_t scalar, size_ } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vredxor_vs_u16m1_u16m1(vuint16m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vredxor_vs_u16m2_u16m1(vuint16m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vredxor_vs_u16m4_u16m1(vuint16m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vredxor_vs_u16m8_u16m1(vuint16m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vredxor_vs_u32m1_u32m1(vuint32m1_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vredxor_vs_u32m2_u32m1(vuint32m2_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vredxor_vs_u32m4_u32m1(vuint32m4_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vredxor_vs_u32m8_u32m1(vuint32m8_t vector, vuint32m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vredxor_vs_u64m1_u64m1(vuint64m1_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m1_t test_vredxor_vs_u64m2_u64m1(vuint64m2_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m1_t test_vredxor_vs_u64m4_u64m1(vuint64m4_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m1_t test_vredxor_vs_u64m8_u64m1(vuint64m8_t vector, vuint64m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m1_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vredxor_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m2_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vredxor_vs_i8m2_i8m1_m(vbool4_t mask, vint8m2_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m4_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m1_t test_vredxor_vs_i8m4_i8m1_m(vbool2_t mask, vint8m4_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i8m8_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m1_t test_vredxor_vs_i8m8_i8m1_m(vbool1_t mask, vint8m8_t vector, vint8m1_t } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint16m1_t test_vredxor_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint16m1_t test_vredxor_vs_i16m2_i16m1_m(vbool8_t mask, vint16m2_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint16m1_t test_vredxor_vs_i16m4_i16m1_m(vbool4_t mask, vint16m4_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i16m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint16m1_t test_vredxor_vs_i16m8_i16m1_m(vbool2_t mask, vint16m8_t vector, vint1 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint32m1_t test_vredxor_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint32m1_t test_vredxor_vs_i32m2_i32m1_m(vbool16_t mask, vint32m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint32m1_t test_vredxor_vs_i32m4_i32m1_m(vbool8_t mask, vint32m4_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i32m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint32m1_t test_vredxor_vs_i32m8_i32m1_m(vbool4_t mask, vint32m8_t vector, vint3 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint64m1_t test_vredxor_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint64m1_t test_vredxor_vs_i64m2_i64m1_m(vbool32_t mask, vint64m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint64m1_t test_vredxor_vs_i64m4_i64m1_m(vbool16_t mask, vint64m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_i64m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint64m1_t test_vredxor_vs_i64m8_i64m1_m(vbool8_t mask, vint64m8_t vector, vint6 } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m1_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint8m1_t test_vredxor_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m2_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint8m1_t test_vredxor_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m2_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m4_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint8m1_t test_vredxor_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m4_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u8m8_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv8i8.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint8m1_t test_vredxor_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m8_t vector, vuint8m } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint16m1_t test_vredxor_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint16m1_t test_vredxor_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint16m1_t test_vredxor_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u16m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv4i16.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint16m1_t test_vredxor_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint32m1_t test_vredxor_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint32m1_t test_vredxor_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint32m1_t test_vredxor_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u32m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv2i32.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint32m1_t test_vredxor_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv1i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m1_t test_vredxor_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv2i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m1_t test_vredxor_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m2_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv4i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m1_t test_vredxor_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m4_t vector, vu } // CHECK-RV64-LABEL: define dso_local @test_vredxor_vs_u64m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vredxor.mask.nxv1i64.nxv8i64.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vwredsum.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vwredsum.c index a9a82f29f2a379..e42e2b07d3f9c4 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vwredsum.c +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vwredsum.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint16m1_t test_vwredsum_vs_i8m1_i16m1(vint8m1_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m2_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv4i16.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint16m1_t test_vwredsum_vs_i8m2_i16m1(vint8m2_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m4_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv4i16.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint16m1_t test_vwredsum_vs_i8m4_i16m1(vint8m4_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m8_i16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv4i16.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint16m1_t test_vwredsum_vs_i8m8_i16m1(vint8m8_t vector, vint16m1_t scalar, size } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m1_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv2i32.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint32m1_t test_vwredsum_vs_i16m1_i32m1(vint16m1_t vector, vint32m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m2_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv2i32.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint32m1_t test_vwredsum_vs_i16m2_i32m1(vint16m2_t vector, vint32m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m4_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vwredsum.nxv2i32.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint32m1_t test_vwredsum_vs_i16m4_i32m1(vint16m4_t vector, vint32m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m8_i32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv2i32.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint32m1_t test_vwredsum_vs_i16m8_i32m1(vint16m8_t vector, vint32m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m1_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv1i64.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint64m1_t test_vwredsum_vs_i32m1_i64m1(vint32m1_t vector, vint64m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m2_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv1i64.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint64m1_t test_vwredsum_vs_i32m2_i64m1(vint32m2_t vector, vint64m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m4_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv1i64.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint64m1_t test_vwredsum_vs_i32m4_i64m1(vint32m4_t vector, vint64m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m8_i64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.nxv1i64.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint64m1_t test_vwredsum_vs_i32m8_i64m1(vint32m8_t vector, vint64m1_t scalar, si } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m1_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv4i16.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m1_t test_vwredsum_vs_i8m1_i16m1_m(vbool8_t mask, vint8m1_t 
vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m2_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv4i16.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m1_t test_vwredsum_vs_i8m2_i16m1_m(vbool4_t mask, vint8m2_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m4_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv4i16.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m1_t test_vwredsum_vs_i8m4_i16m1_m(vbool2_t mask, vint8m4_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i8m8_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv4i16.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m1_t test_vwredsum_vs_i8m8_i16m1_m(vbool1_t mask, vint8m8_t vector, vint16 } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m1_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv2i32.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vwredsum_vs_i16m1_i32m1_m(vbool16_t mask, vint16m1_t vector, vin } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m2_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv2i32.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vwredsum_vs_i16m2_i32m1_m(vbool8_t mask, vint16m2_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m4_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv2i32.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m1_t 
test_vwredsum_vs_i16m4_i32m1_m(vbool4_t mask, vint16m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i16m8_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv2i32.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m1_t test_vwredsum_vs_i16m8_i32m1_m(vbool2_t mask, vint16m8_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m1_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv1i64.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint64m1_t test_vwredsum_vs_i32m1_i64m1_m(vbool32_t mask, vint32m1_t vector, vin } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m2_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv1i64.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint64m1_t test_vwredsum_vs_i32m2_i64m1_m(vbool16_t mask, vint32m2_t vector, vin } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m4_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv1i64.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint64m1_t test_vwredsum_vs_i32m4_i64m1_m(vbool8_t mask, vint32m4_t vector, vint } // CHECK-RV64-LABEL: define dso_local @test_vwredsum_vs_i32m8_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsum.mask.nxv1i64.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vwredsumu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vwredsumu.c index 89957cf9ea1165..6e64776e6cfc7b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vwredsumu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-reduction/wrappers/vwredsumu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 
-target-feature +xtheadvector \ // RUN: -target-feature +d -disable-O0-optnone \ // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf8_u16m1(vuint8mf8_t vector, vuint16m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8mf4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv2i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf4_u16m1(vuint8mf4_t vector, vuint16m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8mf2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv4i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf2_u16m1(vuint8mf2_t vector, vuint16m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m1_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint16m1_t test_vwredsumu_vs_u8m1_u16m1(vuint8m1_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m2_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1_t test_vwredsumu_vs_u8m2_u16m1(vuint8m2_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m4_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m1_t test_vwredsumu_vs_u8m4_u16m1(vuint8m4_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m8_u16m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv4i16.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m1_t test_vwredsumu_vs_u8m8_u16m1(vuint8m8_t vector, vuint16m1_t scalar, } // CHECK-RV64-LABEL: 
define dso_local @test_vwredsumu_vs_u16mf4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv1i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint32m1_t test_vwredsumu_vs_u16mf4_u32m1(vuint16mf4_t vector, vuint32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16mf2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv2i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m1_t test_vwredsumu_vs_u16mf2_u32m1(vuint16mf2_t vector, vuint32m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m1_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m1_t test_vwredsumu_vs_u16m1_u32m1(vuint16m1_t vector, vuint32m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m2_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m1_t test_vwredsumu_vs_u16m2_u32m1(vuint16m2_t vector, vuint32m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m4_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m1_t test_vwredsumu_vs_u16m4_u32m1(vuint16m4_t vector, vuint32m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m8_u32m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv2i32.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint32m1_t test_vwredsumu_vs_u16m8_u32m1(vuint16m8_t vector, vuint32m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32mf2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv1i64.nxv1i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m1_t test_vwredsumu_vs_u32mf2_u64m1(vuint32mf2_t vector, vuint64m1_t scal } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m1_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv1i64.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m1_t test_vwredsumu_vs_u32m1_u64m1(vuint32m1_t vector, vuint64m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m2_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv1i64.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m1_t test_vwredsumu_vs_u32m2_u64m1(vuint32m2_t vector, vuint64m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m4_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv1i64.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint64m1_t test_vwredsumu_vs_u32m4_u64m1(vuint32m4_t vector, vuint64m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m8_u64m1 -// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.nxv1i64.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint64m1_t test_vwredsumu_vs_u32m8_u64m1(vuint32m8_t vector, vuint64m1_t scalar } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8mf8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv1i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf8_u16m1_m(vbool64_t mask, vuint8mf8_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8mf4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv2i8.i64( poison, [[VECTOR]], [[SCALAR]], 
[[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf4_u16m1_m(vbool32_t mask, vuint8mf4_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8mf2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv4i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vwredsumu_vs_u8mf2_u16m1_m(vbool16_t mask, vuint8mf2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m1_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv8i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m1_t test_vwredsumu_vs_u8m1_u16m1_m(vbool8_t mask, vuint8m1_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m2_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv16i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m1_t test_vwredsumu_vs_u8m2_u16m1_m(vbool4_t mask, vuint8m2_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m4_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv32i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m1_t test_vwredsumu_vs_u8m4_u16m1_m(vbool2_t mask, vuint8m4_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u8m8_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv4i16.nxv64i8.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint16m1_t test_vwredsumu_vs_u8m8_u16m1_m(vbool1_t mask, vuint8m8_t vector, vui } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16mf4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv1i16.i64( 
poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m1_t test_vwredsumu_vs_u16mf4_u32m1_m(vbool64_t mask, vuint16mf4_t vector } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16mf2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv2i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m1_t test_vwredsumu_vs_u16mf2_u32m1_m(vbool32_t mask, vuint16mf2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m1_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv4i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m1_t test_vwredsumu_vs_u16m1_u32m1_m(vbool16_t mask, vuint16m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m2_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv8i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint32m1_t test_vwredsumu_vs_u16m2_u32m1_m(vbool8_t mask, vuint16m2_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m4_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv16i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint32m1_t test_vwredsumu_vs_u16m4_u32m1_m(vbool4_t mask, vuint16m4_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u16m8_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv2i32.nxv32i16.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint32m1_t test_vwredsumu_vs_u16m8_u32m1_m(vbool2_t mask, vuint16m8_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32mf2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vwredsumu.mask.nxv1i64.nxv1i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m1_t test_vwredsumu_vs_u32mf2_u64m1_m(vbool64_t mask, vuint32mf2_t vector } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m1_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv1i64.nxv2i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint64m1_t test_vwredsumu_vs_u32m1_u64m1_m(vbool32_t mask, vuint32m1_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m2_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv1i64.nxv4i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint64m1_t test_vwredsumu_vs_u32m2_u64m1_m(vbool16_t mask, vuint32m2_t vector, } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m4_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv1i64.nxv8i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint64m1_t test_vwredsumu_vs_u32m4_u64m1_m(vbool8_t mask, vuint32m4_t vector, v } // CHECK-RV64-LABEL: define dso_local @test_vwredsumu_vs_u32m8_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VECTOR:%.*]], [[SCALAR:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwredsumu.mask.nxv1i64.nxv16i32.i64( poison, [[VECTOR]], [[SCALAR]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vadd.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vadd.c index 10279c7487a7b4..6f26fa38dce290 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vadd.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vadd.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vadd_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef 
signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vadd_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vadd_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vadd_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vadd_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vadd_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vadd_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vadd_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vadd_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vadd_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vadd_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vadd_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vadd_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vadd_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t 
test_vadd_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vadd_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vadd_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vadd_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vadd_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vadd_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vadd_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vadd_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vadd_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vadd_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vadd_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vadd_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vadd_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vadd_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vadd_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vadd_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vadd_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vadd_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vadd_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vadd_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vadd_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_vadd_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vadd_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vadd_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vadd_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vadd_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vadd_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vadd_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], 
i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vadd_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vadd_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vadd_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vadd_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vadd_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vadd_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vadd_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vadd_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vadd_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vadd_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vadd_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vadd_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vadd_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vadd_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_vadd_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vadd_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vadd_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vadd_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vadd_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vadd_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vadd_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) 
// CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vadd_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vadd_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vneg.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vneg.c index 8f9d36a0c0a6ef..f1b00cfbef1370 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vneg.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vneg.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vneg_v_i8m1(vint8m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_vneg_v_i8m2(vint8m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_vneg_v_i8m4(vint8m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_vneg_v_i8m8(vint8m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vneg_v_i16m1(vint16m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_vneg_v_i16m2(vint16m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_vneg_v_i16m4(vint16m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_vneg_v_i16m8(vint16m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vneg_v_i32m1(vint32m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_vneg_v_i32m2(vint32m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_vneg_v_i32m4(vint32m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_vneg_v_i32m8(vint32m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vneg_v_i64m1(vint64m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_vneg_v_i64m2(vint64m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_vneg_v_i64m4(vint64m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vrsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vrsub.c index ac65dcae869a73..07209ced3bfb5f 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vrsub.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vrsub.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vrsub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_vrsub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_vrsub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_vrsub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vrsub_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_vrsub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_vrsub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_vrsub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vrsub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_vrsub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_vrsub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_vrsub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vrsub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_vrsub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_vrsub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m8_t test_vrsub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } 
// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vrsub_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m2_t test_vrsub_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m4_t test_vrsub_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m8_t test_vrsub_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vrsub_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m2_t test_vrsub_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m4_t test_vrsub_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m8_t test_vrsub_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vrsub_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m2_t test_vrsub_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m4_t test_vrsub_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m8_t test_vrsub_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ 
vuint64m1_t test_vrsub_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_vrsub_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m4_t test_vrsub_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vsub.c index b2577fec84c2ff..0cb795708ef745 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vsub.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vsub.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vsub_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vsub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vsub_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef 
signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vsub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vsub_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vsub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vsub_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vsub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vsub_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vsub_vx_i16m1(vint16m1_t 
op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vsub_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vsub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vsub_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vsub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vsub_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vsub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vsub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vsub_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vsub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vsub_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vsub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vsub_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vsub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vsub_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vsub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vsub_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vsub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vsub_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vsub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vsub_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vsub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vsub_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vsub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vsub_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vsub_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vsub_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vsub_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: 
ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vsub_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vsub_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vsub_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vsub_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vsub_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vsub_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vsub_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vsub_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vsub_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vsub_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vsub_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vsub_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vsub_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vsub_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vsub_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vsub_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vsub_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vsub_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vsub_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vsub_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vsub_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) 
// CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vsub_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vsub_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vsub_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vsub_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vsub_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vsub_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vsub_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
// CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vadd.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vadd.c index 572aa571b4ef6b..b27bcf7b935702 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vadd.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vadd.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vadd_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vadd_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vadd_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vadd_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vadd_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ 
vint8m4_t test_vadd_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vadd_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vadd_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vadd_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vadd_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vadd_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vadd_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vadd_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vadd_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vadd_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vadd_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vadd_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vadd_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vadd_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vadd_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vadd_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vadd_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vadd_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vadd_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vadd_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vadd_vx_i64m1(vint64m1_t op1, int64_t 
op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vadd_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vadd_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vadd_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vadd_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vadd_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vadd_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vadd_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vadd_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vadd_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vadd_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vadd_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vadd_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vadd_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vadd_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vadd_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vadd_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vadd_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vadd_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vadd_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vadd_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m8 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vadd_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vadd_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vadd_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vadd_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vadd_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vadd_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vadd_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vadd_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vadd_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vadd_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vadd_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vadd_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vadd_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vadd_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vadd_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vadd_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vadd_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vneg.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vneg.c index 7a8be59ae85b00..c2cc53233a803f 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vneg.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vneg.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vneg_v_i8m1(vint8m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_vneg_v_i8m2(vint8m2_t op1, size_t vl) { } 
// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_vneg_v_i8m4(vint8m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_vneg_v_i8m8(vint8m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vneg_v_i16m1(vint16m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_vneg_v_i16m2(vint16m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_vneg_v_i16m4(vint16m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_vneg_v_i16m8(vint16m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vneg_v_i32m1(vint32m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, 
[[OP1]], i32 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_vneg_v_i32m2(vint32m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_vneg_v_i32m4(vint32m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_vneg_v_i32m8(vint32m8_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vneg_v_i64m1(vint64m1_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_vneg_v_i64m2(vint64m2_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_vneg_v_i64m4(vint64m4_t op1, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vrsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vrsub.c index c4afe312ba9a11..c9ffa392abee34 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vrsub.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vrsub.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S 
-passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vrsub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m2_t test_vrsub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m4_t test_vrsub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m8_t test_vrsub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m1_t test_vrsub_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m2_t test_vrsub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_vrsub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m8_t test_vrsub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m1_t test_vrsub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m2_t test_vrsub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m4_t test_vrsub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_vrsub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m1_t test_vrsub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_vrsub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_vrsub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m8_t test_vrsub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint8m1_t test_vrsub_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint8m2_t test_vrsub_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint8m4_t test_vrsub_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint8m8_t test_vrsub_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m1_t test_vrsub_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m2_t test_vrsub_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m4_t test_vrsub_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m8_t test_vrsub_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m1_t test_vrsub_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m2_t test_vrsub_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m4_t test_vrsub_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m8_t test_vrsub_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m1_t test_vrsub_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_vrsub_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m4_t test_vrsub_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vsub.c index ac55e5f88b354d..9cc36b5ccf9cfe 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vsub.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vsub.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ 
@@ -16,7 +17,7 @@ vint8m1_t test_vsub_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vsub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vsub_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vsub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vsub_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vsub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vsub_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vsub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vsub_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vsub_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vsub_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vsub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vsub_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vsub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vsub_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vsub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vsub_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vsub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vsub_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vsub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vsub_vv_i32m4(vint32m4_t op1, vint32m4_t 
op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vsub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vsub_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vsub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vsub_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vsub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vsub_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vsub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vsub_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vsub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vsub_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vsub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vsub_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vsub_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vsub_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vsub_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vsub_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vsub_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vsub_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vsub_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vsub_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m1 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vsub_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vsub_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vsub_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vsub_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vsub_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vsub_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.i16.i64( poison, [[OP1]], i16 
[[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vsub_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vsub_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vsub_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vsub_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vsub_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vsub_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vsub_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vsub_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vsub_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vsub_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vsub_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vsub_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vsub_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vsub_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vsub_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vsub_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vsub_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vdiv.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vdiv.c index f3e45ddcc35ee3..f9fa3069513580 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vdiv.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vdiv.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vdiv_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vdiv_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vdiv_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vdiv_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vdiv_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vdiv_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vdiv_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vdiv_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vdiv_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vdiv_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vdiv_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vdiv_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vdiv_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vdiv_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vdiv_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vdiv_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t 
test_vdiv_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vdiv_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vdiv_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vdiv_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vdiv_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vdiv_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vdiv_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vdiv_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vdiv_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vdiv_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vdiv_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vdiv_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vdiv_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vdiv_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vdiv_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vdiv_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vdiv_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vdiv_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m2_t test_vdiv_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m2_t test_vdiv_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vdiv.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint8m4_t test_vdiv_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint8m4_t test_vdiv_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint8m8_t test_vdiv_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint8m8_t test_vdiv_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint16m1_t test_vdiv_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint16m1_t test_vdiv_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], 
[[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint16m2_t test_vdiv_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint16m2_t test_vdiv_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint16m4_t test_vdiv_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint16m4_t test_vdiv_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint16m8_t test_vdiv_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint16m8_t test_vdiv_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vint32m1_t test_vdiv_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vint32m1_t test_vdiv_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vint32m2_t test_vdiv_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vint32m2_t test_vdiv_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vint32m4_t test_vdiv_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vint32m4_t test_vdiv_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ 
vint32m8_t test_vdiv_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vint32m8_t test_vdiv_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vint64m1_t test_vdiv_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vint64m1_t test_vdiv_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vint64m2_t test_vdiv_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vint64m2_t test_vdiv_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vint64m4_t test_vdiv_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, } 
// CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vint64m4_t test_vdiv_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vint64m8_t test_vdiv_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vdiv_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdiv.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vdivu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vdivu.c index 6fd5d3c3fa3ed2..729b963f9c4397 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vdivu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vdivu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vdivu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vdivu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vdivu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vdivu_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vdivu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vdivu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vdivu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m8_t test_vdivu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vdivu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vdivu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vdivu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vdivu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vdivu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vdivu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vdivu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vdivu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vdivu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vdivu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m1_t test_vdivu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vdivu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t test_vdivu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vdivu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vdivu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vdivu_vv_u32m8(vuint32m8_t op1, vuint32m8_t 
op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vdivu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vdivu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vdivu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vdivu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vdivu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vdivu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vdivu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vdivu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vdivu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m8_t test_vdivu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vdivu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vdivu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vdivu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t 
test_vdivu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vdivu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vdivu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vdivu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vdivu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vdivu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vdivu_vx_u16m1_m(vbool16_t mask, vuint16m1_t 
op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vdivu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vdivu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vdivu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vdivu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vdivu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vdivu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // 
CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vdivu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vdivu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vdivu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vdivu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vdivu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vdivu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local 
@test_vdivu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vdivu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vdivu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vdivu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vdivu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vdivu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vdivu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vdivu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vdivu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vdivu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vdivu_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vdivu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vrem.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vrem.c index 268a8f80ce451f..54c297e45caa24 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vrem.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vrem.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vrem_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vrem_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vrem_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vrem_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vrem_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vrem_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vrem_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vrem_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vrem_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } 
// CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vrem_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vrem_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vrem_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vrem_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vrem_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vrem_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vrem.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vrem_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vrem_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vrem_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vrem_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vrem_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vrem_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vrem_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vrem_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vrem_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vrem_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vrem_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vrem_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vrem_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vrem_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_vrem_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vrem_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vrem_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vrem_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vrem_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vrem_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m2_t test_vrem_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m2_t test_vrem_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint8m4_t test_vrem_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint8m4_t test_vrem_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint8m8_t test_vrem_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint8m8_t test_vrem_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint16m1_t test_vrem_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint16m1_t test_vrem_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint16m2_t test_vrem_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint16m2_t test_vrem_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint16m4_t test_vrem_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint16m4_t test_vrem_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint16m8_t test_vrem_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vrem.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint16m8_t test_vrem_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vint32m1_t test_vrem_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vint32m1_t test_vrem_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vint32m2_t test_vrem_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vint32m2_t test_vrem_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vint32m4_t test_vrem_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv8i32.i32.i64( poison, 
[[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vint32m4_t test_vrem_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vint32m8_t test_vrem_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vint32m8_t test_vrem_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vint64m1_t test_vrem_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vint64m1_t test_vrem_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vint64m2_t test_vrem_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-606,7 +607,7 @@ vint64m2_t test_vrem_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vint64m4_t test_vrem_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vint64m4_t test_vrem_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vrem_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vint64m8_t test_vrem_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vrem_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrem.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vremu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vremu.c index fbf11b5ee9d330..f31a811f545157 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vremu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-div/thead/vremu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vremu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vremu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vremu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vremu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vremu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vremu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m8_t test_vremu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vremu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vremu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vremu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vremu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vremu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vremu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vremu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vremu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u16m8 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vremu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vremu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m1_t test_vremu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vremu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t test_vremu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vremu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv8i32.i32.i64( poison, 
[[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vremu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vremu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vremu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vremu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vremu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vremu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vremu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vremu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vremu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vremu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m8_t test_vremu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vremu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vremu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] 
@@ -356,7 +357,7 @@ vuint8m2_t test_vremu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vremu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vremu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vremu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vremu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vremu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t 
test_vremu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vremu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vremu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vremu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vremu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vremu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vremu_vv_u16m8_m(vbool2_t 
mask, vuint16m8_t op1, vuint16m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vremu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vremu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vremu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vremu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vremu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vremu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op 
} // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vremu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vremu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vremu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vremu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vremu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vremu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t o } // CHECK-RV64-LABEL: define dso_local 
@test_vremu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vremu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vremu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vremu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vremu_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vremu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vremu_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vremu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-fractional-multiply-with-rounding-and-saturation/thead/vsmul.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-fractional-multiply-with-rounding-and-saturation/thead/vsmul.c index 115f988f418469..2c0fde796aab1a 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-fractional-multiply-with-rounding-and-saturation/thead/vsmul.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-fractional-multiply-with-rounding-and-saturation/thead/vsmul.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // REQUIRES: riscv-registered-target // RUN: 
%clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ @@ -18,7 +19,7 @@ vint8m1_t test_vsmul_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -49,7 +50,7 @@ vint8m1_t test_vsmul_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -59,7 +60,7 @@ vint8m2_t test_vsmul_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -90,7 +91,7 @@ vint8m2_t test_vsmul_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -100,7 +101,7 @@ vint8m4_t test_vsmul_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -131,7 +132,7 @@ vint8m4_t test_vsmul_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -141,7 +142,7 @@ vint8m8_t test_vsmul_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -172,7 +173,7 @@ vint8m8_t test_vsmul_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -182,7 +183,7 @@ vint16m1_t test_vsmul_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -213,7 +214,7 @@ vint16m1_t test_vsmul_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -223,7 +224,7 @@ vint16m2_t test_vsmul_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -254,7 +255,7 @@ vint16m2_t test_vsmul_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, siz // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -264,7 +265,7 @@ vint16m4_t test_vsmul_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 
0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -295,7 +296,7 @@ vint16m4_t test_vsmul_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, siz // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -305,7 +306,7 @@ vint16m8_t test_vsmul_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint16m8_t test_vsmul_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, siz // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint32m1_t test_vsmul_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -377,7 +378,7 @@ vint32m1_t test_vsmul_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -387,7 +388,7 @@ vint32m2_t test_vsmul_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -418,7 +419,7 @@ vint32m2_t test_vsmul_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -428,7 +429,7 @@ vint32m4_t test_vsmul_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -459,7 +460,7 @@ vint32m4_t test_vsmul_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, siz // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -469,7 +470,7 @@ vint32m8_t test_vsmul_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -500,7 +501,7 @@ vint32m8_t test_vsmul_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, siz // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -510,7 +511,7 @@ vint64m1_t test_vsmul_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -541,7 +542,7 @@ vint64m1_t test_vsmul_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-551,7 +552,7 @@ vint64m2_t test_vsmul_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -582,7 +583,7 @@ vint64m2_t test_vsmul_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -592,7 +593,7 @@ vint64m4_t test_vsmul_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -623,7 +624,7 @@ vint64m4_t test_vsmul_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -633,7 +634,7 @@ vint64m8_t test_vsmul_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-fractional-multiply-with-rounding-and-saturation/wrappers/vsmul.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-fractional-multiply-with-rounding-and-saturation/wrappers/vsmul.c index 75e3b32307ad3b..831b421bd9f672 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-fractional-multiply-with-rounding-and-saturation/wrappers/vsmul.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-fractional-multiply-with-rounding-and-saturation/wrappers/vsmul.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // REQUIRES: riscv-registered-target // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // 
RUN: -disable-O0-optnone -emit-llvm %s -o - | \ @@ -18,7 +19,7 @@ vint8m1_t test_vsmul_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -49,7 +50,7 @@ vint8m1_t test_vsmul_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -59,7 +60,7 @@ vint8m2_t test_vsmul_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -90,7 +91,7 @@ vint8m2_t test_vsmul_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -100,7 +101,7 @@ vint8m4_t test_vsmul_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -131,7 +132,7 @@ vint8m4_t test_vsmul_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -141,7 +142,7 @@ vint8m8_t test_vsmul_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -172,7 +173,7 @@ vint8m8_t test_vsmul_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -182,7 +183,7 @@ vint16m1_t test_vsmul_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -213,7 +214,7 @@ vint16m1_t test_vsmul_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -223,7 +224,7 @@ vint16m2_t test_vsmul_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -254,7 +255,7 @@ vint16m2_t test_vsmul_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, siz // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -264,7 +265,7 @@ vint16m4_t test_vsmul_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] 
@@ -295,7 +296,7 @@ vint16m4_t test_vsmul_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, siz // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -305,7 +306,7 @@ vint16m8_t test_vsmul_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint16m8_t test_vsmul_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, siz // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint32m1_t test_vsmul_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -377,7 +378,7 @@ vint32m1_t test_vsmul_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -387,7 +388,7 @@ vint32m2_t test_vsmul_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -418,7 +419,7 @@ vint32m2_t test_vsmul_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -428,7 +429,7 @@ vint32m4_t test_vsmul_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -459,7 +460,7 @@ vint32m4_t test_vsmul_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, siz // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -469,7 +470,7 @@ vint32m8_t test_vsmul_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -500,7 +501,7 @@ vint32m8_t test_vsmul_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, siz // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -510,7 +511,7 @@ vint64m1_t test_vsmul_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -541,7 +542,7 @@ vint64m1_t test_vsmul_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -551,7 +552,7 @@ vint64m2_t 
test_vsmul_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -582,7 +583,7 @@ vint64m2_t test_vsmul_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -592,7 +593,7 @@ vint64m4_t test_vsmul_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -623,7 +624,7 @@ vint64m4_t test_vsmul_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, si // CHECK-RV64-LABEL: define dso_local @test_vsmul_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -633,7 +634,7 @@ vint64m8_t test_vsmul_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsmul_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsmul.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vmacc.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vmacc.c index bdb45e43db6743..3d8d94367e8e0d 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vmacc.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vmacc.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vmacc_vv_i8m1(vint8m1_t vd, vint8m1_t vs1, vint8m1_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i8m1 
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vmacc_vx_i8m1(vint8m1_t vd, int8_t rs1, vint8m1_t vs2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vmacc_vv_i8m2(vint8m2_t vd, vint8m2_t vs1, vint8m2_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vmacc_vx_i8m2(vint8m2_t vd, int8_t rs1, vint8m2_t vs2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vmacc_vv_i8m4(vint8m4_t vd, vint8m4_t vs1, vint8m4_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vmacc_vx_i8m4(vint8m4_t vd, int8_t rs1, vint8m4_t vs2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vmacc_vv_i8m8(vint8m8_t vd, vint8m8_t vs1, vint8m8_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vmacc_vx_i8m8(vint8m8_t vd, int8_t rs1, vint8m8_t vs2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vmacc_vv_i16m1(vint16m1_t vd, vint16m1_t vs1, vint16m1_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vmacc_vx_i16m1(vint16m1_t vd, int16_t rs1, vint16m1_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vmacc_vv_i16m2(vint16m2_t vd, vint16m2_t vs1, vint16m2_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vmacc_vx_i16m2(vint16m2_t vd, int16_t rs1, vint16m2_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vmacc_vv_i16m4(vint16m4_t vd, vint16m4_t vs1, vint16m4_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i16.i16.i64( [[VD]], i16 
[[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vmacc_vx_i16m4(vint16m4_t vd, int16_t rs1, vint16m4_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vmacc_vv_i16m8(vint16m8_t vd, vint16m8_t vs1, vint16m8_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vmacc_vx_i16m8(vint16m8_t vd, int16_t rs1, vint16m8_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vmacc_vv_i32m1(vint32m1_t vd, vint32m1_t vs1, vint32m1_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vmacc_vx_i32m1(vint32m1_t vd, int32_t rs1, vint32m1_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vmacc_vv_i32m2(vint32m2_t vd, vint32m2_t vs1, vint32m2_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vmacc_vx_i32m2(vint32m2_t vd, int32_t rs1, vint32m2_t vs2, 
size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vmacc_vv_i32m4(vint32m4_t vd, vint32m4_t vs1, vint32m4_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vmacc_vx_i32m4(vint32m4_t vd, int32_t rs1, vint32m4_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vmacc_vv_i32m8(vint32m8_t vd, vint32m8_t vs1, vint32m8_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vmacc_vx_i32m8(vint32m8_t vd, int32_t rs1, vint32m8_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vmacc_vv_i64m1(vint64m1_t vd, vint64m1_t vs1, vint64m1_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vmacc_vx_i64m1(vint64m1_t vd, int64_t rs1, vint64m1_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vmacc_vv_i64m2(vint64m2_t vd, vint64m2_t vs1, vint64m2_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vmacc_vx_i64m2(vint64m2_t vd, int64_t rs1, vint64m2_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vmacc_vv_i64m4(vint64m4_t vd, vint64m4_t vs1, vint64m4_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vmacc_vx_i64m4(vint64m4_t vd, int64_t rs1, vint64m4_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vmacc_vv_i64m8(vint64m8_t vd, vint64m8_t vs1, vint64m8_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vmacc_vx_i64m8(vint64m8_t vd, int64_t rs1, vint64m8_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u8m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i8.nxv8i8.i64( [[VD]], 
[[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vmacc_vv_u8m1(vuint8m1_t vd, vuint8m1_t vs1, vuint8m1_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u8m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vmacc_vx_u8m1(vuint8m1_t vd, uint8_t rs1, vuint8m1_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vmacc_vv_u8m2(vuint8m2_t vd, vuint8m2_t vs1, vuint8m2_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vmacc_vx_u8m2(vuint8m2_t vd, uint8_t rs1, vuint8m2_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vmacc_vv_u8m4(vuint8m4_t vd, vuint8m4_t vs1, vuint8m4_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vmacc_vx_u8m4(vuint8m4_t vd, uint8_t rs1, vuint8m4_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vmacc_vv_u8m8(vuint8m8_t vd, vuint8m8_t vs1, vuint8m8_t vs2, siz } // 
CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vmacc_vx_u8m8(vuint8m8_t vd, uint8_t rs1, vuint8m8_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vmacc_vv_u16m1(vuint16m1_t vd, vuint16m1_t vs1, vuint16m1_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vmacc_vx_u16m1(vuint16m1_t vd, uint16_t rs1, vuint16m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vmacc_vv_u16m2(vuint16m2_t vd, vuint16m2_t vs1, vuint16m2_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vmacc_vx_u16m2(vuint16m2_t vd, uint16_t rs1, vuint16m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vmacc_vv_u16m4(vuint16m4_t vd, vuint16m4_t vs1, vuint16m4_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vmacc_vx_u16m4(vuint16m4_t vd, uint16_t rs1, vuint16m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vmacc_vv_u16m8(vuint16m8_t vd, vuint16m8_t vs1, vuint16m8_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vmacc_vx_u16m8(vuint16m8_t vd, uint16_t rs1, vuint16m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vmacc_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs1, vuint32m1_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vmacc_vx_u32m1(vuint32m1_t vd, uint32_t rs1, vuint32m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vmacc_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs1, vuint32m2_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vmacc_vx_u32m2(vuint32m2_t vd, uint32_t rs1, vuint32m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vmacc_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs1, vuint32m4_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vmacc_vx_u32m4(vuint32m4_t vd, uint32_t rs1, vuint32m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vmacc_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs1, vuint32m8_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vmacc_vx_u32m8(vuint32m8_t vd, uint32_t rs1, vuint32m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vmacc_vv_u64m1(vuint64m1_t vd, vuint64m1_t vs1, vuint64m1_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 
@@ vuint64m1_t test_vmacc_vx_u64m1(vuint64m1_t vd, uint64_t rs1, vuint64m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vmacc_vv_u64m2(vuint64m2_t vd, vuint64m2_t vs1, vuint64m2_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vmacc_vx_u64m2(vuint64m2_t vd, uint64_t rs1, vuint64m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vmacc_vv_u64m4(vuint64m4_t vd, vuint64m4_t vs1, vuint64m4_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vmacc_vx_u64m4(vuint64m4_t vd, uint64_t rs1, vuint64m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vmacc_vv_u64m8(vuint64m8_t vd, vuint64m8_t vs1, vuint64m8_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vuint64m8_t test_vmacc_vx_u64m8(vuint64m8_t vd, uint64_t rs1, vuint64m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vint8m1_t test_vmacc_vv_i8m1_m(vbool8_t mask, vint8m1_t vd, vint8m1_t vs1, vint8 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vint8m1_t test_vmacc_vx_i8m1_m(vbool8_t mask, vint8m1_t vd, int8_t rs1, vint8m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vint8m2_t test_vmacc_vv_i8m2_m(vbool4_t mask, vint8m2_t vd, vint8m2_t vs1, vint8 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -686,7 +687,7 @@ vint8m2_t test_vmacc_vx_i8m2_m(vbool4_t mask, vint8m2_t vd, int8_t rs1, vint8m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vint8m4_t test_vmacc_vv_i8m4_m(vbool2_t mask, vint8m4_t vd, vint8m4_t vs1, vint8 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vint8m4_t test_vmacc_vx_i8m4_m(vbool2_t mask, vint8m4_t vd, int8_t rs1, vint8m4_ } // CHECK-RV64-LABEL: define dso_local 
@test_vmacc_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vint8m8_t test_vmacc_vv_i8m8_m(vbool1_t mask, vint8m8_t vd, vint8m8_t vs1, vint8 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vint8m8_t test_vmacc_vx_i8m8_m(vbool1_t mask, vint8m8_t vd, int8_t rs1, vint8m8_ } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vint16m1_t test_vmacc_vv_i16m1_m(vbool16_t mask, vint16m1_t vd, vint16m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vint16m1_t test_vmacc_vx_i16m1_m(vbool16_t mask, vint16m1_t vd, int16_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 +757,7 @@ vint16m2_t test_vmacc_vv_i16m2_m(vbool8_t mask, vint16m2_t vd, vint16m2_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vint16m2_t 
test_vmacc_vx_i16m2_m(vbool8_t mask, vint16m2_t vd, int16_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vint16m4_t test_vmacc_vv_i16m4_m(vbool4_t mask, vint16m4_t vd, vint16m4_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vint16m4_t test_vmacc_vx_i16m4_m(vbool4_t mask, vint16m4_t vd, int16_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vint16m8_t test_vmacc_vv_i16m8_m(vbool2_t mask, vint16m8_t vd, vint16m8_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vint16m8_t test_vmacc_vx_i16m8_m(vbool2_t mask, vint16m8_t vd, int16_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vint32m1_t test_vmacc_vv_i32m1_m(vbool32_t mask, vint32m1_t vd, vint32m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv2i32.i32.i64( 
[[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vint32m1_t test_vmacc_vx_i32m1_m(vbool32_t mask, vint32m1_t vd, int32_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vint32m2_t test_vmacc_vv_i32m2_m(vbool16_t mask, vint32m2_t vd, vint32m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ vint32m2_t test_vmacc_vx_i32m2_m(vbool16_t mask, vint32m2_t vd, int32_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vint32m4_t test_vmacc_vv_i32m4_m(vbool8_t mask, vint32m4_t vd, vint32m4_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -866,7 +867,7 @@ vint32m4_t test_vmacc_vx_i32m4_m(vbool8_t mask, vint32m4_t vd, int32_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -876,7 +877,7 @@ vint32m8_t test_vmacc_vv_i32m8_m(vbool4_t mask, vint32m8_t vd, vint32m8_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -886,7 +887,7 @@ vint32m8_t test_vmacc_vx_i32m8_m(vbool4_t mask, vint32m8_t vd, int32_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -896,7 +897,7 @@ vint64m1_t test_vmacc_vv_i64m1_m(vbool64_t mask, vint64m1_t vd, vint64m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -906,7 +907,7 @@ vint64m1_t test_vmacc_vx_i64m1_m(vbool64_t mask, vint64m1_t vd, int64_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -916,7 +917,7 @@ vint64m2_t test_vmacc_vv_i64m2_m(vbool32_t mask, vint64m2_t vd, vint64m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -926,7 +927,7 @@ vint64m2_t test_vmacc_vx_i64m2_m(vbool32_t mask, vint64m2_t vd, int64_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -936,7 +937,7 @@ vint64m4_t test_vmacc_vv_i64m4_m(vbool16_t mask, vint64m4_t vd, vint64m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef 
[[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -946,7 +947,7 @@ vint64m4_t test_vmacc_vx_i64m4_m(vbool16_t mask, vint64m4_t vd, int64_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -956,7 +957,7 @@ vint64m8_t test_vmacc_vv_i64m8_m(vbool8_t mask, vint64m8_t vd, vint64m8_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -966,7 +967,7 @@ vint64m8_t test_vmacc_vx_i64m8_m(vbool8_t mask, vint64m8_t vd, int64_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -976,7 +977,7 @@ vuint8m1_t test_vmacc_vv_u8m1_m(vbool8_t mask, vuint8m1_t vd, vuint8m1_t vs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -986,7 +987,7 @@ vuint8m1_t test_vmacc_vx_u8m1_m(vbool8_t mask, vuint8m1_t vd, uint8_t rs1, vuint } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -996,7 +997,7 @@ vuint8m2_t test_vmacc_vv_u8m2_m(vbool4_t mask, vuint8m2_t vd, vuint8m2_t vs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1006,7 +1007,7 @@ vuint8m2_t test_vmacc_vx_u8m2_m(vbool4_t mask, vuint8m2_t vd, uint8_t rs1, vuint } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1016,7 +1017,7 @@ vuint8m4_t test_vmacc_vv_u8m4_m(vbool2_t mask, vuint8m4_t vd, vuint8m4_t vs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1026,7 +1027,7 @@ vuint8m4_t test_vmacc_vx_u8m4_m(vbool2_t mask, vuint8m4_t vd, uint8_t rs1, vuint } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1036,7 +1037,7 @@ vuint8m8_t test_vmacc_vv_u8m8_m(vbool1_t mask, vuint8m8_t vd, vuint8m8_t vs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1046,7 +1047,7 @@ vuint8m8_t test_vmacc_vx_u8m8_m(vbool1_t mask, vuint8m8_t vd, uint8_t rs1, vuint } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1056,7 +1057,7 @@ vuint16m1_t test_vmacc_vv_u16m1_m(vbool16_t mask, vuint16m1_t vd, vuint16m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u16m1_m -// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1066,7 +1067,7 @@ vuint16m1_t test_vmacc_vx_u16m1_m(vbool16_t mask, vuint16m1_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1076,7 +1077,7 @@ vuint16m2_t test_vmacc_vv_u16m2_m(vbool8_t mask, vuint16m2_t vd, vuint16m2_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1086,7 +1087,7 @@ vuint16m2_t test_vmacc_vx_u16m2_m(vbool8_t mask, vuint16m2_t vd, uint16_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1096,7 +1097,7 @@ vuint16m4_t test_vmacc_vv_u16m4_m(vbool4_t mask, vuint16m4_t vd, vuint16m4_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1106,7 +1107,7 @@ vuint16m4_t test_vmacc_vx_u16m4_m(vbool4_t mask, vuint16m4_t vd, uint16_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1116,7 +1117,7 @@ vuint16m8_t 
test_vmacc_vv_u16m8_m(vbool2_t mask, vuint16m8_t vd, vuint16m8_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1126,7 +1127,7 @@ vuint16m8_t test_vmacc_vx_u16m8_m(vbool2_t mask, vuint16m8_t vd, uint16_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1136,7 +1137,7 @@ vuint32m1_t test_vmacc_vv_u32m1_m(vbool32_t mask, vuint32m1_t vd, vuint32m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1146,7 +1147,7 @@ vuint32m1_t test_vmacc_vx_u32m1_m(vbool32_t mask, vuint32m1_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1156,7 +1157,7 @@ vuint32m2_t test_vmacc_vv_u32m2_m(vbool16_t mask, vuint32m2_t vd, vuint32m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1166,7 +1167,7 @@ vuint32m2_t test_vmacc_vx_u32m2_m(vbool16_t mask, vuint32m2_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmacc.mask.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1176,7 +1177,7 @@ vuint32m4_t test_vmacc_vv_u32m4_m(vbool8_t mask, vuint32m4_t vd, vuint32m4_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1186,7 +1187,7 @@ vuint32m4_t test_vmacc_vx_u32m4_m(vbool8_t mask, vuint32m4_t vd, uint32_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1196,7 +1197,7 @@ vuint32m8_t test_vmacc_vv_u32m8_m(vbool4_t mask, vuint32m8_t vd, vuint32m8_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1206,7 +1207,7 @@ vuint32m8_t test_vmacc_vx_u32m8_m(vbool4_t mask, vuint32m8_t vd, uint32_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1216,7 +1217,7 @@ vuint64m1_t test_vmacc_vv_u64m1_m(vbool64_t mask, vuint64m1_t vd, vuint64m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1226,7 +1227,7 @@ vuint64m1_t test_vmacc_vx_u64m1_m(vbool64_t mask, vuint64m1_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1236,7 +1237,7 @@ vuint64m2_t test_vmacc_vv_u64m2_m(vbool32_t mask, vuint64m2_t vd, vuint64m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1246,7 +1247,7 @@ vuint64m2_t test_vmacc_vx_u64m2_m(vbool32_t mask, vuint64m2_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1256,7 +1257,7 @@ vuint64m4_t test_vmacc_vv_u64m4_m(vbool16_t mask, vuint64m4_t vd, vuint64m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1266,7 +1267,7 @@ vuint64m4_t test_vmacc_vx_u64m4_m(vbool16_t mask, vuint64m4_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1276,7 +1277,7 @@ vuint64m8_t test_vmacc_vv_u64m8_m(vbool8_t mask, vuint64m8_t vd, vuint64m8_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmacc_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmacc.mask.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vmadd.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vmadd.c index 17ca89d3e796e5..884a30e25321cf 100644 --- 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vmadd.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vmadd.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vint8m1_t test_vmadd_vv_i8m1(vint8m1_t vd, vint8m1_t vs1, vint8m1_t vs2, size_t
}
// CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i8m1
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7 +27,7 @@ vint8m1_t test_vmadd_vx_i8m1(vint8m1_t vd, int8_t rs1, vint8m1_t vs2, size_t vl)
}
// CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i8m2
-// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -36,7 +37,7 @@ vint8m2_t test_vmadd_vv_i8m2(vint8m2_t vd, vint8m2_t vs1, vint8m2_t vs2, size_t
}
// CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i8m2
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -46,7 +47,7 @@ vint8m2_t test_vmadd_vx_i8m2(vint8m2_t vd, int8_t rs1, vint8m2_t vs2, size_t vl)
}
// CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i8m4
-// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -56,7 +57,7 @@ vint8m4_t test_vmadd_vv_i8m4(vint8m4_t vd, vint8m4_t vs1, vint8m4_t vs2, size_t
}
// CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i8m4
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -66,7 +67,7 @@ vint8m4_t test_vmadd_vx_i8m4(vint8m4_t vd, int8_t rs1, vint8m4_t vs2, size_t vl)
}
// CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i8m8
-// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vmadd_vv_i8m8(vint8m8_t vd, vint8m8_t vs1, vint8m8_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vmadd_vx_i8m8(vint8m8_t vd, int8_t rs1, vint8m8_t vs2, size_t vl) } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vmadd_vv_i16m1(vint16m1_t vd, vint16m1_t vs1, vint16m1_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vmadd_vx_i16m1(vint16m1_t vd, int16_t rs1, vint16m1_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vmadd_vv_i16m2(vint16m2_t vd, vint16m2_t vs1, vint16m2_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vmadd_vx_i16m2(vint16m2_t vd, int16_t rs1, vint16m2_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmadd.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vmadd_vv_i16m4(vint16m4_t vd, vint16m4_t vs1, vint16m4_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vmadd_vx_i16m4(vint16m4_t vd, int16_t rs1, vint16m4_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vmadd_vv_i16m8(vint16m8_t vd, vint16m8_t vs1, vint16m8_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vmadd_vx_i16m8(vint16m8_t vd, int16_t rs1, vint16m8_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vmadd_vv_i32m1(vint32m1_t vd, vint32m1_t vs1, vint32m1_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vmadd_vx_i32m1(vint32m1_t vd, int32_t rs1, vint32m1_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t 
test_vmadd_vv_i32m2(vint32m2_t vd, vint32m2_t vs1, vint32m2_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vmadd_vx_i32m2(vint32m2_t vd, int32_t rs1, vint32m2_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vmadd_vv_i32m4(vint32m4_t vd, vint32m4_t vs1, vint32m4_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vmadd_vx_i32m4(vint32m4_t vd, int32_t rs1, vint32m4_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vmadd_vv_i32m8(vint32m8_t vd, vint32m8_t vs1, vint32m8_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vmadd_vx_i32m8(vint32m8_t vd, int32_t rs1, vint32m8_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vmadd_vv_i64m1(vint64m1_t vd, vint64m1_t vs1, vint64m1_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i64m1 -// CHECK-RV64-SAME: ( 
[[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vmadd_vx_i64m1(vint64m1_t vd, int64_t rs1, vint64m1_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vmadd_vv_i64m2(vint64m2_t vd, vint64m2_t vs1, vint64m2_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vmadd_vx_i64m2(vint64m2_t vd, int64_t rs1, vint64m2_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vmadd_vv_i64m4(vint64m4_t vd, vint64m4_t vs1, vint64m4_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vmadd_vx_i64m4(vint64m4_t vd, int64_t rs1, vint64m4_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vmadd_vv_i64m8(vint64m8_t vd, vint64m8_t vs1, vint64m8_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vmadd_vx_i64m8(vint64m8_t vd, int64_t rs1, vint64m8_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u8m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vmadd_vv_u8m1(vuint8m1_t vd, vuint8m1_t vs1, vuint8m1_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u8m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vmadd_vx_u8m1(vuint8m1_t vd, uint8_t rs1, vuint8m1_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vmadd_vv_u8m2(vuint8m2_t vd, vuint8m2_t vs1, vuint8m2_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vmadd_vx_u8m2(vuint8m2_t vd, uint8_t rs1, vuint8m2_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vmadd_vv_u8m4(vuint8m4_t vd, vuint8m4_t vs1, vuint8m4_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-386,7 +387,7 @@ vuint8m4_t test_vmadd_vx_u8m4(vuint8m4_t vd, uint8_t rs1, vuint8m4_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vmadd_vv_u8m8(vuint8m8_t vd, vuint8m8_t vs1, vuint8m8_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vmadd_vx_u8m8(vuint8m8_t vd, uint8_t rs1, vuint8m8_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vmadd_vv_u16m1(vuint16m1_t vd, vuint16m1_t vs1, vuint16m1_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vmadd_vx_u16m1(vuint16m1_t vd, uint16_t rs1, vuint16m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vmadd_vv_u16m2(vuint16m2_t vd, vuint16m2_t vs1, vuint16m2_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vmadd_vx_u16m2(vuint16m2_t vd, uint16_t rs1, vuint16m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u16m4 -// 
CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vmadd_vv_u16m4(vuint16m4_t vd, vuint16m4_t vs1, vuint16m4_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vmadd_vx_u16m4(vuint16m4_t vd, uint16_t rs1, vuint16m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vmadd_vv_u16m8(vuint16m8_t vd, vuint16m8_t vs1, vuint16m8_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vmadd_vx_u16m8(vuint16m8_t vd, uint16_t rs1, vuint16m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vmadd_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs1, vuint32m1_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vmadd_vx_u32m1(vuint32m1_t vd, uint32_t rs1, vuint32m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vmadd_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs1, vuint32m2_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vmadd_vx_u32m2(vuint32m2_t vd, uint32_t rs1, vuint32m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vmadd_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs1, vuint32m4_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vmadd_vx_u32m4(vuint32m4_t vd, uint32_t rs1, vuint32m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vmadd_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs1, vuint32m8_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vmadd_vx_u32m8(vuint32m8_t vd, uint32_t rs1, vuint32m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv1i64.nxv1i64.i64( 
[[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vmadd_vv_u64m1(vuint64m1_t vd, vuint64m1_t vs1, vuint64m1_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vmadd_vx_u64m1(vuint64m1_t vd, uint64_t rs1, vuint64m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vmadd_vv_u64m2(vuint64m2_t vd, vuint64m2_t vs1, vuint64m2_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vmadd_vx_u64m2(vuint64m2_t vd, uint64_t rs1, vuint64m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vmadd_vv_u64m4(vuint64m4_t vd, vuint64m4_t vs1, vuint64m4_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vmadd_vx_u64m4(vuint64m4_t vd, uint64_t rs1, vuint64m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vmadd_vv_u64m8(vuint64m8_t vd, vuint64m8_t vs1, vuint64m8_t vs2 } // CHECK-RV64-LABEL: define dso_local 
@test_vmadd_vx_u64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vuint64m8_t test_vmadd_vx_u64m8(vuint64m8_t vd, uint64_t rs1, vuint64m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vint8m1_t test_vmadd_vv_i8m1_m(vbool8_t mask, vint8m1_t vd, vint8m1_t vs1, vint8 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vint8m1_t test_vmadd_vx_i8m1_m(vbool8_t mask, vint8m1_t vd, int8_t rs1, vint8m1_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vint8m2_t test_vmadd_vv_i8m2_m(vbool4_t mask, vint8m2_t vd, vint8m2_t vs1, vint8 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -686,7 +687,7 @@ vint8m2_t test_vmadd_vx_i8m2_m(vbool4_t mask, vint8m2_t vd, int8_t rs1, vint8m2_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vint8m4_t test_vmadd_vv_i8m4_m(vbool2_t mask, vint8m4_t vd, vint8m4_t vs1, vint8 } // 
CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vint8m4_t test_vmadd_vx_i8m4_m(vbool2_t mask, vint8m4_t vd, int8_t rs1, vint8m4_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vint8m8_t test_vmadd_vv_i8m8_m(vbool1_t mask, vint8m8_t vd, vint8m8_t vs1, vint8 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vint8m8_t test_vmadd_vx_i8m8_m(vbool1_t mask, vint8m8_t vd, int8_t rs1, vint8m8_ } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vint16m1_t test_vmadd_vv_i16m1_m(vbool16_t mask, vint16m1_t vd, vint16m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vint16m1_t test_vmadd_vx_i16m1_m(vbool16_t mask, vint16m1_t vd, int16_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 
+757,7 @@ vint16m2_t test_vmadd_vv_i16m2_m(vbool8_t mask, vint16m2_t vd, vint16m2_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vint16m2_t test_vmadd_vx_i16m2_m(vbool8_t mask, vint16m2_t vd, int16_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vint16m4_t test_vmadd_vv_i16m4_m(vbool4_t mask, vint16m4_t vd, vint16m4_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vint16m4_t test_vmadd_vx_i16m4_m(vbool4_t mask, vint16m4_t vd, int16_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vint16m8_t test_vmadd_vv_i16m8_m(vbool2_t mask, vint16m8_t vd, vint16m8_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vint16m8_t test_vmadd_vx_i16m8_m(vbool2_t mask, vint16m8_t vd, int16_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmadd.mask.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vint32m1_t test_vmadd_vv_i32m1_m(vbool32_t mask, vint32m1_t vd, vint32m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vint32m1_t test_vmadd_vx_i32m1_m(vbool32_t mask, vint32m1_t vd, int32_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vint32m2_t test_vmadd_vv_i32m2_m(vbool16_t mask, vint32m2_t vd, vint32m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ vint32m2_t test_vmadd_vx_i32m2_m(vbool16_t mask, vint32m2_t vd, int32_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vint32m4_t test_vmadd_vv_i32m4_m(vbool8_t mask, vint32m4_t vd, vint32m4_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -866,7 +867,7 @@ vint32m4_t test_vmadd_vx_i32m4_m(vbool8_t mask, vint32m4_t vd, int32_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -876,7 +877,7 @@ vint32m8_t test_vmadd_vv_i32m8_m(vbool4_t mask, vint32m8_t vd, vint32m8_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -886,7 +887,7 @@ vint32m8_t test_vmadd_vx_i32m8_m(vbool4_t mask, vint32m8_t vd, int32_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -896,7 +897,7 @@ vint64m1_t test_vmadd_vv_i64m1_m(vbool64_t mask, vint64m1_t vd, vint64m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -906,7 +907,7 @@ vint64m1_t test_vmadd_vx_i64m1_m(vbool64_t mask, vint64m1_t vd, int64_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -916,7 +917,7 @@ vint64m2_t test_vmadd_vv_i64m2_m(vbool32_t mask, vint64m2_t vd, vint64m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -926,7 +927,7 @@ vint64m2_t test_vmadd_vx_i64m2_m(vbool32_t mask, vint64m2_t vd, int64_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -936,7 +937,7 @@ vint64m4_t test_vmadd_vv_i64m4_m(vbool16_t mask, vint64m4_t vd, vint64m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -946,7 +947,7 @@ vint64m4_t test_vmadd_vx_i64m4_m(vbool16_t mask, vint64m4_t vd, int64_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -956,7 +957,7 @@ vint64m8_t test_vmadd_vv_i64m8_m(vbool8_t mask, vint64m8_t vd, vint64m8_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -966,7 +967,7 @@ vint64m8_t test_vmadd_vx_i64m8_m(vbool8_t mask, vint64m8_t vd, int64_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -976,7 +977,7 @@ vuint8m1_t test_vmadd_vv_u8m1_m(vbool8_t mask, vuint8m1_t vd, vuint8m1_t vs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -986,7 +987,7 @@ vuint8m1_t test_vmadd_vx_u8m1_m(vbool8_t mask, vuint8m1_t vd, uint8_t rs1, vuint } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], 
[[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -996,7 +997,7 @@ vuint8m2_t test_vmadd_vv_u8m2_m(vbool4_t mask, vuint8m2_t vd, vuint8m2_t vs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1006,7 +1007,7 @@ vuint8m2_t test_vmadd_vx_u8m2_m(vbool4_t mask, vuint8m2_t vd, uint8_t rs1, vuint } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1016,7 +1017,7 @@ vuint8m4_t test_vmadd_vv_u8m4_m(vbool2_t mask, vuint8m4_t vd, vuint8m4_t vs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1026,7 +1027,7 @@ vuint8m4_t test_vmadd_vx_u8m4_m(vbool2_t mask, vuint8m4_t vd, uint8_t rs1, vuint } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1036,7 +1037,7 @@ vuint8m8_t test_vmadd_vv_u8m8_m(vbool1_t mask, vuint8m8_t vd, vuint8m8_t vs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1046,7 +1047,7 @@ vuint8m8_t test_vmadd_vx_u8m8_m(vbool1_t mask, vuint8m8_t vd, uint8_t rs1, vuint } // 
CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1056,7 +1057,7 @@ vuint16m1_t test_vmadd_vv_u16m1_m(vbool16_t mask, vuint16m1_t vd, vuint16m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1066,7 +1067,7 @@ vuint16m1_t test_vmadd_vx_u16m1_m(vbool16_t mask, vuint16m1_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1076,7 +1077,7 @@ vuint16m2_t test_vmadd_vv_u16m2_m(vbool8_t mask, vuint16m2_t vd, vuint16m2_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1086,7 +1087,7 @@ vuint16m2_t test_vmadd_vx_u16m2_m(vbool8_t mask, vuint16m2_t vd, uint16_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1096,7 +1097,7 @@ vuint16m4_t test_vmadd_vv_u16m4_m(vbool4_t mask, vuint16m4_t vd, vuint16m4_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: 
ret [[TMP0]] @@ -1106,7 +1107,7 @@ vuint16m4_t test_vmadd_vx_u16m4_m(vbool4_t mask, vuint16m4_t vd, uint16_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1116,7 +1117,7 @@ vuint16m8_t test_vmadd_vv_u16m8_m(vbool2_t mask, vuint16m8_t vd, vuint16m8_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1126,7 +1127,7 @@ vuint16m8_t test_vmadd_vx_u16m8_m(vbool2_t mask, vuint16m8_t vd, uint16_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1136,7 +1137,7 @@ vuint32m1_t test_vmadd_vv_u32m1_m(vbool32_t mask, vuint32m1_t vd, vuint32m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1146,7 +1147,7 @@ vuint32m1_t test_vmadd_vx_u32m1_m(vbool32_t mask, vuint32m1_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1156,7 +1157,7 @@ vuint32m2_t test_vmadd_vv_u32m2_m(vbool16_t mask, vuint32m2_t vd, vuint32m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.th.vmadd.mask.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1166,7 +1167,7 @@ vuint32m2_t test_vmadd_vx_u32m2_m(vbool16_t mask, vuint32m2_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1176,7 +1177,7 @@ vuint32m4_t test_vmadd_vv_u32m4_m(vbool8_t mask, vuint32m4_t vd, vuint32m4_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1186,7 +1187,7 @@ vuint32m4_t test_vmadd_vx_u32m4_m(vbool8_t mask, vuint32m4_t vd, uint32_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1196,7 +1197,7 @@ vuint32m8_t test_vmadd_vv_u32m8_m(vbool4_t mask, vuint32m8_t vd, vuint32m8_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1206,7 +1207,7 @@ vuint32m8_t test_vmadd_vx_u32m8_m(vbool4_t mask, vuint32m8_t vd, uint32_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1216,7 +1217,7 @@ vuint64m1_t test_vmadd_vv_u64m1_m(vbool64_t mask, vuint64m1_t vd, vuint64m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1226,7 +1227,7 @@ vuint64m1_t test_vmadd_vx_u64m1_m(vbool64_t mask, vuint64m1_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1236,7 +1237,7 @@ vuint64m2_t test_vmadd_vv_u64m2_m(vbool32_t mask, vuint64m2_t vd, vuint64m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1246,7 +1247,7 @@ vuint64m2_t test_vmadd_vx_u64m2_m(vbool32_t mask, vuint64m2_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1256,7 +1257,7 @@ vuint64m4_t test_vmadd_vv_u64m4_m(vbool16_t mask, vuint64m4_t vd, vuint64m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1266,7 +1267,7 @@ vuint64m4_t test_vmadd_vx_u64m4_m(vbool16_t mask, vuint64m4_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1276,7 +1277,7 @@ vuint64m8_t test_vmadd_vv_u64m8_m(vbool8_t mask, vuint64m8_t vd, vuint64m8_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vmadd_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmadd.mask.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vnmsac.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vnmsac.c
index 109d8190278301..75aba46b4f81ab 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vnmsac.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vnmsac.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vint8m1_t test_vnmsac_vv_i8m1(vint8m1_t vd, vint8m1_t vs1, vint8m1_t vs2, size_t
}
// CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i8m1
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7 +27,7 @@ vint8m1_t test_vnmsac_vx_i8m1(vint8m1_t vd, int8_t rs1, vint8m1_t vs2, size_t vl
}
// CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i8m2
-// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -36,7 +37,7 @@ vint8m2_t test_vnmsac_vv_i8m2(vint8m2_t vd, vint8m2_t vs1, vint8m2_t vs2, size_t
}
// CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i8m2
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -46,7 +47,7 @@ vint8m2_t test_vnmsac_vx_i8m2(vint8m2_t vd, int8_t rs1, vint8m2_t vs2, size_t vl
}
// CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i8m4
-// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -56,7 +57,7 @@ vint8m4_t test_vnmsac_vv_i8m4(vint8m4_t vd, vint8m4_t vs1, vint8m4_t vs2, size_t
}
// CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i8m4
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vnmsac_vx_i8m4(vint8m4_t vd, int8_t rs1, vint8m4_t vs2, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vnmsac_vv_i8m8(vint8m8_t vd, vint8m8_t vs1, vint8m8_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vnmsac_vx_i8m8(vint8m8_t vd, int8_t rs1, vint8m8_t vs2, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vnmsac_vv_i16m1(vint16m1_t vd, vint16m1_t vs1, vint16m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vnmsac_vx_i16m1(vint16m1_t vd, int16_t rs1, vint16m1_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vnmsac_vv_i16m2(vint16m2_t vd, vint16m2_t vs1, vint16m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vnmsac_vx_i16m2(vint16m2_t vd, int16_t rs1, vint16m2_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vnmsac_vv_i16m4(vint16m4_t vd, vint16m4_t vs1, vint16m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vnmsac_vx_i16m4(vint16m4_t vd, int16_t rs1, vint16m4_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vnmsac_vv_i16m8(vint16m8_t vd, vint16m8_t vs1, vint16m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vnmsac_vx_i16m8(vint16m8_t vd, int16_t rs1, vint16m8_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vnmsac_vv_i32m1(vint32m1_t vd, vint32m1_t vs1, vint32m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vnmsac_vx_i32m1(vint32m1_t vd, int32_t rs1, vint32m1_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vnmsac_vv_i32m2(vint32m2_t vd, vint32m2_t vs1, vint32m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vnmsac_vx_i32m2(vint32m2_t vd, int32_t rs1, vint32m2_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vnmsac_vv_i32m4(vint32m4_t vd, vint32m4_t vs1, vint32m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vnmsac_vx_i32m4(vint32m4_t vd, int32_t rs1, vint32m4_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vnmsac_vv_i32m8(vint32m8_t vd, vint32m8_t vs1, vint32m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vnmsac_vx_i32m8(vint32m8_t vd, int32_t rs1, vint32m8_t vs2, size } // 
CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vnmsac_vv_i64m1(vint64m1_t vd, vint64m1_t vs1, vint64m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vnmsac_vx_i64m1(vint64m1_t vd, int64_t rs1, vint64m1_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vnmsac_vv_i64m2(vint64m2_t vd, vint64m2_t vs1, vint64m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vnmsac_vx_i64m2(vint64m2_t vd, int64_t rs1, vint64m2_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vnmsac_vv_i64m4(vint64m4_t vd, vint64m4_t vs1, vint64m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vnmsac_vx_i64m4(vint64m4_t vd, int64_t rs1, vint64m4_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], 
[[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vnmsac_vv_i64m8(vint64m8_t vd, vint64m8_t vs1, vint64m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vnmsac_vx_i64m8(vint64m8_t vd, int64_t rs1, vint64m8_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u8m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vnmsac_vv_u8m1(vuint8m1_t vd, vuint8m1_t vs1, vuint8m1_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u8m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vnmsac_vx_u8m1(vuint8m1_t vd, uint8_t rs1, vuint8m1_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vnmsac_vv_u8m2(vuint8m2_t vd, vuint8m2_t vs1, vuint8m2_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vnmsac_vx_u8m2(vuint8m2_t vd, uint8_t rs1, vuint8m2_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv32i8.nxv32i8.i64( [[VD]], 
[[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vnmsac_vv_u8m4(vuint8m4_t vd, vuint8m4_t vs1, vuint8m4_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vnmsac_vx_u8m4(vuint8m4_t vd, uint8_t rs1, vuint8m4_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vnmsac_vv_u8m8(vuint8m8_t vd, vuint8m8_t vs1, vuint8m8_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vnmsac_vx_u8m8(vuint8m8_t vd, uint8_t rs1, vuint8m8_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vnmsac_vv_u16m1(vuint16m1_t vd, vuint16m1_t vs1, vuint16m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vnmsac_vx_u16m1(vuint16m1_t vd, uint16_t rs1, vuint16m1_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vnmsac_vv_u16m2(vuint16m2_t vd, vuint16m2_t vs1, vuint16m2_t vs 
} // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vnmsac_vx_u16m2(vuint16m2_t vd, uint16_t rs1, vuint16m2_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vnmsac_vv_u16m4(vuint16m4_t vd, vuint16m4_t vs1, vuint16m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vnmsac_vx_u16m4(vuint16m4_t vd, uint16_t rs1, vuint16m4_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vnmsac_vv_u16m8(vuint16m8_t vd, vuint16m8_t vs1, vuint16m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vnmsac_vx_u16m8(vuint16m8_t vd, uint16_t rs1, vuint16m8_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vnmsac_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs1, vuint32m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vnmsac_vx_u32m1(vuint32m1_t vd, uint32_t rs1, vuint32m1_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vnmsac_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs1, vuint32m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vnmsac_vx_u32m2(vuint32m2_t vd, uint32_t rs1, vuint32m2_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vnmsac_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs1, vuint32m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vnmsac_vx_u32m4(vuint32m4_t vd, uint32_t rs1, vuint32m4_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vnmsac_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs1, vuint32m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vnmsac_vx_u32m8(vuint32m8_t vd, uint32_t rs1, vuint32m8_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vnmsac_vv_u64m1(vuint64m1_t vd, vuint64m1_t vs1, vuint64m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vnmsac_vx_u64m1(vuint64m1_t vd, uint64_t rs1, vuint64m1_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vnmsac_vv_u64m2(vuint64m2_t vd, vuint64m2_t vs1, vuint64m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vnmsac_vx_u64m2(vuint64m2_t vd, uint64_t rs1, vuint64m2_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vnmsac_vv_u64m4(vuint64m4_t vd, vuint64m4_t vs1, vuint64m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-626,7 +627,7 @@ vuint64m4_t test_vnmsac_vx_u64m4(vuint64m4_t vd, uint64_t rs1, vuint64m4_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vnmsac_vv_u64m8(vuint64m8_t vd, vuint64m8_t vs1, vuint64m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vuint64m8_t test_vnmsac_vx_u64m8(vuint64m8_t vd, uint64_t rs1, vuint64m8_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vint8m1_t test_vnmsac_vv_i8m1_m(vbool8_t mask, vint8m1_t vd, vint8m1_t vs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vint8m1_t test_vnmsac_vx_i8m1_m(vbool8_t mask, vint8m1_t vd, int8_t rs1, vint8m1 } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vint8m2_t test_vnmsac_vv_i8m2_m(vbool4_t mask, vint8m2_t vd, vint8m2_t vs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-686,7 +687,7 @@ vint8m2_t test_vnmsac_vx_i8m2_m(vbool4_t mask, vint8m2_t vd, int8_t rs1, vint8m2 } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vint8m4_t test_vnmsac_vv_i8m4_m(vbool2_t mask, vint8m4_t vd, vint8m4_t vs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vint8m4_t test_vnmsac_vx_i8m4_m(vbool2_t mask, vint8m4_t vd, int8_t rs1, vint8m4 } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vint8m8_t test_vnmsac_vv_i8m8_m(vbool1_t mask, vint8m8_t vd, vint8m8_t vs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vint8m8_t test_vnmsac_vx_i8m8_m(vbool1_t mask, vint8m8_t vd, int8_t rs1, vint8m8 } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vint16m1_t test_vnmsac_vv_i16m1_m(vbool16_t mask, vint16m1_t vd, vint16m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vnmsac.mask.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vint16m1_t test_vnmsac_vx_i16m1_m(vbool16_t mask, vint16m1_t vd, int16_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 +757,7 @@ vint16m2_t test_vnmsac_vv_i16m2_m(vbool8_t mask, vint16m2_t vd, vint16m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vint16m2_t test_vnmsac_vx_i16m2_m(vbool8_t mask, vint16m2_t vd, int16_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vint16m4_t test_vnmsac_vv_i16m4_m(vbool4_t mask, vint16m4_t vd, vint16m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vint16m4_t test_vnmsac_vx_i16m4_m(vbool4_t mask, vint16m4_t vd, int16_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vint16m8_t test_vnmsac_vv_i16m8_m(vbool2_t mask, vint16m8_t vd, vint16m8_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext 
[[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vint16m8_t test_vnmsac_vx_i16m8_m(vbool2_t mask, vint16m8_t vd, int16_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vint32m1_t test_vnmsac_vv_i32m1_m(vbool32_t mask, vint32m1_t vd, vint32m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vint32m1_t test_vnmsac_vx_i32m1_m(vbool32_t mask, vint32m1_t vd, int32_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vint32m2_t test_vnmsac_vv_i32m2_m(vbool16_t mask, vint32m2_t vd, vint32m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ vint32m2_t test_vnmsac_vx_i32m2_m(vbool16_t mask, vint32m2_t vd, int32_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vint32m4_t test_vnmsac_vv_i32m4_m(vbool8_t mask, vint32m4_t vd, vint32m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -866,7 +867,7 @@ vint32m4_t test_vnmsac_vx_i32m4_m(vbool8_t mask, vint32m4_t vd, int32_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -876,7 +877,7 @@ vint32m8_t test_vnmsac_vv_i32m8_m(vbool4_t mask, vint32m8_t vd, vint32m8_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -886,7 +887,7 @@ vint32m8_t test_vnmsac_vx_i32m8_m(vbool4_t mask, vint32m8_t vd, int32_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -896,7 +897,7 @@ vint64m1_t test_vnmsac_vv_i64m1_m(vbool64_t mask, vint64m1_t vd, vint64m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -906,7 +907,7 @@ vint64m1_t test_vnmsac_vx_i64m1_m(vbool64_t mask, vint64m1_t vd, int64_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -916,7 +917,7 @@ vint64m2_t test_vnmsac_vv_i64m2_m(vbool32_t mask, vint64m2_t vd, vint64m2_t vs1, } // CHECK-RV64-LABEL: define dso_local 
@test_vnmsac_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -926,7 +927,7 @@ vint64m2_t test_vnmsac_vx_i64m2_m(vbool32_t mask, vint64m2_t vd, int64_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -936,7 +937,7 @@ vint64m4_t test_vnmsac_vv_i64m4_m(vbool16_t mask, vint64m4_t vd, vint64m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -946,7 +947,7 @@ vint64m4_t test_vnmsac_vx_i64m4_m(vbool16_t mask, vint64m4_t vd, int64_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -956,7 +957,7 @@ vint64m8_t test_vnmsac_vv_i64m8_m(vbool8_t mask, vint64m8_t vd, vint64m8_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -966,7 +967,7 @@ vint64m8_t test_vnmsac_vx_i64m8_m(vbool8_t mask, vint64m8_t vd, int64_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -976,7 +977,7 @@ vuint8m1_t test_vnmsac_vv_u8m1_m(vbool8_t mask, 
vuint8m1_t vd, vuint8m1_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -986,7 +987,7 @@ vuint8m1_t test_vnmsac_vx_u8m1_m(vbool8_t mask, vuint8m1_t vd, uint8_t rs1, vuin } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -996,7 +997,7 @@ vuint8m2_t test_vnmsac_vv_u8m2_m(vbool4_t mask, vuint8m2_t vd, vuint8m2_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1006,7 +1007,7 @@ vuint8m2_t test_vnmsac_vx_u8m2_m(vbool4_t mask, vuint8m2_t vd, uint8_t rs1, vuin } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1016,7 +1017,7 @@ vuint8m4_t test_vnmsac_vv_u8m4_m(vbool2_t mask, vuint8m4_t vd, vuint8m4_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1026,7 +1027,7 @@ vuint8m4_t test_vnmsac_vx_u8m4_m(vbool2_t mask, vuint8m4_t vd, uint8_t rs1, vuin } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1036,7 +1037,7 @@ vuint8m8_t test_vnmsac_vv_u8m8_m(vbool1_t mask, vuint8m8_t vd, vuint8m8_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1046,7 +1047,7 @@ vuint8m8_t test_vnmsac_vx_u8m8_m(vbool1_t mask, vuint8m8_t vd, uint8_t rs1, vuin } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1056,7 +1057,7 @@ vuint16m1_t test_vnmsac_vv_u16m1_m(vbool16_t mask, vuint16m1_t vd, vuint16m1_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1066,7 +1067,7 @@ vuint16m1_t test_vnmsac_vx_u16m1_m(vbool16_t mask, vuint16m1_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1076,7 +1077,7 @@ vuint16m2_t test_vnmsac_vv_u16m2_m(vbool8_t mask, vuint16m2_t vd, vuint16m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1086,7 +1087,7 @@ vuint16m2_t test_vnmsac_vx_u16m2_m(vbool8_t mask, vuint16m2_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1096,7 +1097,7 @@ vuint16m4_t test_vnmsac_vv_u16m4_m(vbool4_t mask, vuint16m4_t vd, vuint16m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1106,7 +1107,7 @@ vuint16m4_t test_vnmsac_vx_u16m4_m(vbool4_t mask, vuint16m4_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1116,7 +1117,7 @@ vuint16m8_t test_vnmsac_vv_u16m8_m(vbool2_t mask, vuint16m8_t vd, vuint16m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1126,7 +1127,7 @@ vuint16m8_t test_vnmsac_vx_u16m8_m(vbool2_t mask, vuint16m8_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1136,7 +1137,7 @@ vuint32m1_t test_vnmsac_vv_u32m1_m(vbool32_t mask, vuint32m1_t vd, vuint32m1_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1146,7 +1147,7 @@ vuint32m1_t test_vnmsac_vx_u32m1_m(vbool32_t mask, vuint32m1_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1156,7 +1157,7 @@ vuint32m2_t test_vnmsac_vv_u32m2_m(vbool16_t mask, vuint32m2_t vd, vuint32m2_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1166,7 +1167,7 @@ vuint32m2_t test_vnmsac_vx_u32m2_m(vbool16_t mask, vuint32m2_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1176,7 +1177,7 @@ vuint32m4_t test_vnmsac_vv_u32m4_m(vbool8_t mask, vuint32m4_t vd, vuint32m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1186,7 +1187,7 @@ vuint32m4_t test_vnmsac_vx_u32m4_m(vbool8_t mask, vuint32m4_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1196,7 +1197,7 @@ vuint32m8_t test_vnmsac_vv_u32m8_m(vbool4_t mask, vuint32m8_t vd, vuint32m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1206,7 +1207,7 @@ vuint32m8_t test_vnmsac_vx_u32m8_m(vbool4_t mask, vuint32m8_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local 
@test_vnmsac_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1216,7 +1217,7 @@ vuint64m1_t test_vnmsac_vv_u64m1_m(vbool64_t mask, vuint64m1_t vd, vuint64m1_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1226,7 +1227,7 @@ vuint64m1_t test_vnmsac_vx_u64m1_m(vbool64_t mask, vuint64m1_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1236,7 +1237,7 @@ vuint64m2_t test_vnmsac_vv_u64m2_m(vbool32_t mask, vuint64m2_t vd, vuint64m2_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1246,7 +1247,7 @@ vuint64m2_t test_vnmsac_vx_u64m2_m(vbool32_t mask, vuint64m2_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1256,7 +1257,7 @@ vuint64m4_t test_vnmsac_vv_u64m4_m(vbool16_t mask, vuint64m4_t vd, vuint64m4_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1266,7 +1267,7 @@ vuint64m4_t 
test_vnmsac_vx_u64m4_m(vbool16_t mask, vuint64m4_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1276,7 +1277,7 @@ vuint64m8_t test_vnmsac_vv_u64m8_m(vbool8_t mask, vuint64m8_t vd, vuint64m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsac_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsac.mask.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vnmsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vnmsub.c index cf817bf636d4aa..88e6c74e1f9ed2 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vnmsub.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul-add/thead/vnmsub.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vnmsub_vv_i8m1(vint8m1_t vd, vint8m1_t vs1, vint8m1_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i8m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vnmsub_vx_i8m1(vint8m1_t vd, int8_t rs1, vint8m1_t vs2, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vnmsub_vv_i8m2(vint8m2_t vd, vint8m2_t vs1, vint8m2_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i8.i8.i64( [[VD]], i8 
[[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vnmsub_vx_i8m2(vint8m2_t vd, int8_t rs1, vint8m2_t vs2, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vnmsub_vv_i8m4(vint8m4_t vd, vint8m4_t vs1, vint8m4_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vnmsub_vx_i8m4(vint8m4_t vd, int8_t rs1, vint8m4_t vs2, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vnmsub_vv_i8m8(vint8m8_t vd, vint8m8_t vs1, vint8m8_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vnmsub_vx_i8m8(vint8m8_t vd, int8_t rs1, vint8m8_t vs2, size_t vl } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vnmsub_vv_i16m1(vint16m1_t vd, vint16m1_t vs1, vint16m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vnmsub_vx_i16m1(vint16m1_t vd, int16_t rs1, vint16m1_t vs2, size } // 
CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vnmsub_vv_i16m2(vint16m2_t vd, vint16m2_t vs1, vint16m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vnmsub_vx_i16m2(vint16m2_t vd, int16_t rs1, vint16m2_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vnmsub_vv_i16m4(vint16m4_t vd, vint16m4_t vs1, vint16m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vnmsub_vx_i16m4(vint16m4_t vd, int16_t rs1, vint16m4_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vnmsub_vv_i16m8(vint16m8_t vd, vint16m8_t vs1, vint16m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vnmsub_vx_i16m8(vint16m8_t vd, int16_t rs1, vint16m8_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vnmsub_vv_i32m1(vint32m1_t vd, vint32m1_t vs1, vint32m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vnmsub_vx_i32m1(vint32m1_t vd, int32_t rs1, vint32m1_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vnmsub_vv_i32m2(vint32m2_t vd, vint32m2_t vs1, vint32m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vnmsub_vx_i32m2(vint32m2_t vd, int32_t rs1, vint32m2_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vnmsub_vv_i32m4(vint32m4_t vd, vint32m4_t vs1, vint32m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vnmsub_vx_i32m4(vint32m4_t vd, int32_t rs1, vint32m4_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vnmsub_vv_i32m8(vint32m8_t vd, vint32m8_t vs1, vint32m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vnmsub_vx_i32m8(vint32m8_t vd, int32_t rs1, vint32m8_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vnmsub_vv_i64m1(vint64m1_t vd, vint64m1_t vs1, vint64m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vnmsub_vx_i64m1(vint64m1_t vd, int64_t rs1, vint64m1_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vnmsub_vv_i64m2(vint64m2_t vd, vint64m2_t vs1, vint64m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vnmsub_vx_i64m2(vint64m2_t vd, int64_t rs1, vint64m2_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t 
test_vnmsub_vv_i64m4(vint64m4_t vd, vint64m4_t vs1, vint64m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vnmsub_vx_i64m4(vint64m4_t vd, int64_t rs1, vint64m4_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vnmsub_vv_i64m8(vint64m8_t vd, vint64m8_t vs1, vint64m8_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vnmsub_vx_i64m8(vint64m8_t vd, int64_t rs1, vint64m8_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u8m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vnmsub_vv_u8m1(vuint8m1_t vd, vuint8m1_t vs1, vuint8m1_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u8m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vnmsub_vx_u8m1(vuint8m1_t vd, uint8_t rs1, vuint8m1_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vnmsub_vv_u8m2(vuint8m2_t vd, vuint8m2_t vs1, vuint8m2_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u8m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext 
[[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vnmsub_vx_u8m2(vuint8m2_t vd, uint8_t rs1, vuint8m2_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vnmsub_vv_u8m4(vuint8m4_t vd, vuint8m4_t vs1, vuint8m4_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u8m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vnmsub_vx_u8m4(vuint8m4_t vd, uint8_t rs1, vuint8m4_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vnmsub_vv_u8m8(vuint8m8_t vd, vuint8m8_t vs1, vuint8m8_t vs2, si } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u8m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vnmsub_vx_u8m8(vuint8m8_t vd, uint8_t rs1, vuint8m8_t vs2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vnmsub_vv_u16m1(vuint16m1_t vd, vuint16m1_t vs1, vuint16m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u16m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vnmsub_vx_u16m1(vuint16m1_t vd, uint16_t rs1, vuint16m1_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vnmsub_vv_u16m2(vuint16m2_t vd, vuint16m2_t vs1, vuint16m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vnmsub_vx_u16m2(vuint16m2_t vd, uint16_t rs1, vuint16m2_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vnmsub_vv_u16m4(vuint16m4_t vd, vuint16m4_t vs1, vuint16m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vnmsub_vx_u16m4(vuint16m4_t vd, uint16_t rs1, vuint16m4_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vnmsub_vv_u16m8(vuint16m8_t vd, vuint16m8_t vs1, vuint16m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv32i16.i16.i64( [[VD]], i16 
[[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vnmsub_vx_u16m8(vuint16m8_t vd, uint16_t rs1, vuint16m8_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vnmsub_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs1, vuint32m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u32m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vnmsub_vx_u32m1(vuint32m1_t vd, uint32_t rs1, vuint32m1_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vnmsub_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs1, vuint32m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vnmsub_vx_u32m2(vuint32m2_t vd, uint32_t rs1, vuint32m2_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vnmsub_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs1, vuint32m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vnmsub_vx_u32m4(vuint32m4_t vd, uint32_t rs1, 
vuint32m4_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vnmsub_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs1, vuint32m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vnmsub_vx_u32m8(vuint32m8_t vd, uint32_t rs1, vuint32m8_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vnmsub_vv_u64m1(vuint64m1_t vd, vuint64m1_t vs1, vuint64m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u64m1 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vnmsub_vx_u64m1(vuint64m1_t vd, uint64_t rs1, vuint64m1_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vnmsub_vv_u64m2(vuint64m2_t vd, vuint64m2_t vs1, vuint64m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vnmsub_vx_u64m2(vuint64m2_t vd, uint64_t rs1, vuint64m2_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vnmsub_vv_u64m4(vuint64m4_t vd, vuint64m4_t vs1, vuint64m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vnmsub_vx_u64m4(vuint64m4_t vd, uint64_t rs1, vuint64m4_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vnmsub_vv_u64m8(vuint64m8_t vd, vuint64m8_t vs1, vuint64m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vuint64m8_t test_vnmsub_vx_u64m8(vuint64m8_t vd, uint64_t rs1, vuint64m8_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vint8m1_t test_vnmsub_vv_i8m1_m(vbool8_t mask, vint8m1_t vd, vint8m1_t vs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vint8m1_t test_vnmsub_vx_i8m1_m(vbool8_t mask, vint8m1_t vd, int8_t rs1, vint8m1 } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vint8m2_t test_vnmsub_vv_i8m2_m(vbool4_t mask, vint8m2_t vd, vint8m2_t vs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -686,7 +687,7 @@ vint8m2_t test_vnmsub_vx_i8m2_m(vbool4_t mask, vint8m2_t vd, int8_t rs1, vint8m2 } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vint8m4_t test_vnmsub_vv_i8m4_m(vbool2_t mask, vint8m4_t vd, vint8m4_t vs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vint8m4_t test_vnmsub_vx_i8m4_m(vbool2_t mask, vint8m4_t vd, int8_t rs1, vint8m4 } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vint8m8_t test_vnmsub_vv_i8m8_m(vbool1_t mask, vint8m8_t vd, vint8m8_t vs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vint8m8_t test_vnmsub_vx_i8m8_m(vbool1_t mask, vint8m8_t vd, int8_t rs1, vint8m8 } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vint16m1_t test_vnmsub_vv_i16m1_m(vbool16_t mask, vint16m1_t vd, vint16m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vint16m1_t test_vnmsub_vx_i16m1_m(vbool16_t mask, vint16m1_t vd, int16_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 +757,7 @@ vint16m2_t test_vnmsub_vv_i16m2_m(vbool8_t mask, vint16m2_t vd, vint16m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vint16m2_t test_vnmsub_vx_i16m2_m(vbool8_t mask, vint16m2_t vd, int16_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vint16m4_t test_vnmsub_vv_i16m4_m(vbool4_t mask, vint16m4_t vd, vint16m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vint16m4_t test_vnmsub_vx_i16m4_m(vbool4_t mask, vint16m4_t vd, int16_t rs1, vin } // CHECK-RV64-LABEL: define dso_local 
@test_vnmsub_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vint16m8_t test_vnmsub_vv_i16m8_m(vbool2_t mask, vint16m8_t vd, vint16m8_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vint16m8_t test_vnmsub_vx_i16m8_m(vbool2_t mask, vint16m8_t vd, int16_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vint32m1_t test_vnmsub_vv_i32m1_m(vbool32_t mask, vint32m1_t vd, vint32m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vint32m1_t test_vnmsub_vx_i32m1_m(vbool32_t mask, vint32m1_t vd, int32_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vint32m2_t test_vnmsub_vv_i32m2_m(vbool16_t mask, vint32m2_t vd, vint32m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ 
vint32m2_t test_vnmsub_vx_i32m2_m(vbool16_t mask, vint32m2_t vd, int32_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vint32m4_t test_vnmsub_vv_i32m4_m(vbool8_t mask, vint32m4_t vd, vint32m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -866,7 +867,7 @@ vint32m4_t test_vnmsub_vx_i32m4_m(vbool8_t mask, vint32m4_t vd, int32_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -876,7 +877,7 @@ vint32m8_t test_vnmsub_vv_i32m8_m(vbool4_t mask, vint32m8_t vd, vint32m8_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -886,7 +887,7 @@ vint32m8_t test_vnmsub_vx_i32m8_m(vbool4_t mask, vint32m8_t vd, int32_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -896,7 +897,7 @@ vint64m1_t test_vnmsub_vv_i64m1_m(vbool64_t mask, vint64m1_t vd, vint64m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv1i64.i64.i64( 
[[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -906,7 +907,7 @@ vint64m1_t test_vnmsub_vx_i64m1_m(vbool64_t mask, vint64m1_t vd, int64_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -916,7 +917,7 @@ vint64m2_t test_vnmsub_vv_i64m2_m(vbool32_t mask, vint64m2_t vd, vint64m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -926,7 +927,7 @@ vint64m2_t test_vnmsub_vx_i64m2_m(vbool32_t mask, vint64m2_t vd, int64_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -936,7 +937,7 @@ vint64m4_t test_vnmsub_vv_i64m4_m(vbool16_t mask, vint64m4_t vd, vint64m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -946,7 +947,7 @@ vint64m4_t test_vnmsub_vx_i64m4_m(vbool16_t mask, vint64m4_t vd, int64_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -956,7 +957,7 @@ vint64m8_t test_vnmsub_vv_i64m8_m(vbool8_t mask, vint64m8_t vd, vint64m8_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -966,7 +967,7 @@ vint64m8_t test_vnmsub_vx_i64m8_m(vbool8_t mask, vint64m8_t vd, int64_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -976,7 +977,7 @@ vuint8m1_t test_vnmsub_vv_u8m1_m(vbool8_t mask, vuint8m1_t vd, vuint8m1_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -986,7 +987,7 @@ vuint8m1_t test_vnmsub_vx_u8m1_m(vbool8_t mask, vuint8m1_t vd, uint8_t rs1, vuin } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -996,7 +997,7 @@ vuint8m2_t test_vnmsub_vv_u8m2_m(vbool4_t mask, vuint8m2_t vd, vuint8m2_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1006,7 +1007,7 @@ vuint8m2_t test_vnmsub_vx_u8m2_m(vbool4_t mask, vuint8m2_t vd, uint8_t rs1, vuin } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1016,7 +1017,7 @@ vuint8m4_t test_vnmsub_vv_u8m4_m(vbool2_t mask, vuint8m4_t vd, vuint8m4_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 
noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv32i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1026,7 +1027,7 @@ vuint8m4_t test_vnmsub_vx_u8m4_m(vbool2_t mask, vuint8m4_t vd, uint8_t rs1, vuin } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv64i8.nxv64i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1036,7 +1037,7 @@ vuint8m8_t test_vnmsub_vv_u8m8_m(vbool1_t mask, vuint8m8_t vd, vuint8m8_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv64i8.i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1046,7 +1047,7 @@ vuint8m8_t test_vnmsub_vx_u8m8_m(vbool1_t mask, vuint8m8_t vd, uint8_t rs1, vuin } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1056,7 +1057,7 @@ vuint16m1_t test_vnmsub_vv_u16m1_m(vbool16_t mask, vuint16m1_t vd, vuint16m1_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1066,7 +1067,7 @@ vuint16m1_t test_vnmsub_vx_u16m1_m(vbool16_t mask, vuint16m1_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1076,7 +1077,7 @@ vuint16m2_t test_vnmsub_vv_u16m2_m(vbool8_t mask, vuint16m2_t vd, vuint16m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext 
[[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1086,7 +1087,7 @@ vuint16m2_t test_vnmsub_vx_u16m2_m(vbool8_t mask, vuint16m2_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1096,7 +1097,7 @@ vuint16m4_t test_vnmsub_vv_u16m4_m(vbool4_t mask, vuint16m4_t vd, vuint16m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1106,7 +1107,7 @@ vuint16m4_t test_vnmsub_vx_u16m4_m(vbool4_t mask, vuint16m4_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv32i16.nxv32i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1116,7 +1117,7 @@ vuint16m8_t test_vnmsub_vv_u16m8_m(vbool2_t mask, vuint16m8_t vd, vuint16m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv32i16.i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1126,7 +1127,7 @@ vuint16m8_t test_vnmsub_vx_u16m8_m(vbool2_t mask, vuint16m8_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1136,7 +1137,7 @@ vuint32m1_t test_vnmsub_vv_u32m1_m(vbool32_t mask, vuint32m1_t vd, vuint32m1_t v 
} // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv2i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1146,7 +1147,7 @@ vuint32m1_t test_vnmsub_vx_u32m1_m(vbool32_t mask, vuint32m1_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1156,7 +1157,7 @@ vuint32m2_t test_vnmsub_vv_u32m2_m(vbool16_t mask, vuint32m2_t vd, vuint32m2_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1166,7 +1167,7 @@ vuint32m2_t test_vnmsub_vx_u32m2_m(vbool16_t mask, vuint32m2_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1176,7 +1177,7 @@ vuint32m4_t test_vnmsub_vv_u32m4_m(vbool8_t mask, vuint32m4_t vd, vuint32m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1186,7 +1187,7 @@ vuint32m4_t test_vnmsub_vx_u32m4_m(vbool8_t mask, vuint32m4_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i32.nxv16i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -1196,7 +1197,7 @@ vuint32m8_t test_vnmsub_vv_u32m8_m(vbool4_t mask, vuint32m8_t vd, vuint32m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv16i32.i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1206,7 +1207,7 @@ vuint32m8_t test_vnmsub_vx_u32m8_m(vbool4_t mask, vuint32m8_t vd, uint32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv1i64.nxv1i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1216,7 +1217,7 @@ vuint64m1_t test_vnmsub_vv_u64m1_m(vbool64_t mask, vuint64m1_t vd, vuint64m1_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv1i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1226,7 +1227,7 @@ vuint64m1_t test_vnmsub_vx_u64m1_m(vbool64_t mask, vuint64m1_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv2i64.nxv2i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1236,7 +1237,7 @@ vuint64m2_t test_vnmsub_vv_u64m2_m(vbool32_t mask, vuint64m2_t vd, vuint64m2_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv2i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1246,7 +1247,7 @@ vuint64m2_t test_vnmsub_vx_u64m2_m(vbool32_t mask, vuint64m2_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vnmsub.mask.nxv4i64.nxv4i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1256,7 +1257,7 @@ vuint64m4_t test_vnmsub_vv_u64m4_m(vbool16_t mask, vuint64m4_t vd, vuint64m4_t v } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv4i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1266,7 +1267,7 @@ vuint64m4_t test_vnmsub_vx_u64m4_m(vbool16_t mask, vuint64m4_t vd, uint64_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i64.nxv8i64.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1276,7 +1277,7 @@ vuint64m8_t test_vnmsub_vv_u64m8_m(vbool8_t mask, vuint64m8_t vd, vuint64m8_t vs } // CHECK-RV64-LABEL: define dso_local @test_vnmsub_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i64 noundef [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vnmsub.mask.nxv8i64.i64.i64( [[VD]], i64 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmul.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmul.c index a1df9f0e13c24c..7333e1c0380ecb 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmul.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmul.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vmul_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vmul_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vmul_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vmul_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vmul_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vmul_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vmul_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vmul_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vmul_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vmul_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vmul_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vmul_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vmul_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vmul_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vmul_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t 
test_vmul_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vmul_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vmul_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vmul_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vmul_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vmul_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vmul_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.th.vmul.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vmul_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vmul_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vmul_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vmul_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vmul_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vmul_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vmul_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vmul_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vmul_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vmul_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vmul_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vmul_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vmul_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vmul_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_vmul_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vmul_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vmul_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vmul_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vmul_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vmul_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vmul_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: 
ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vmul_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vmul_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vmul_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vmul_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vmul_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vmul_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vmul_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vmul_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vmul_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vmul_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vmul_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vmul_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vmul_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vmul_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vmul_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vmul_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vmul_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vmul_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vmul_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vmul_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vmul_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -646,7 +647,7 @@ vuint64m8_t test_vmul_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -656,7 +657,7 @@ vint8m1_t test_vmul_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -666,7 +667,7 @@ vint8m1_t test_vmul_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -676,7 +677,7 @@ vint8m2_t test_vmul_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -686,7 +687,7 @@ vint8m2_t test_vmul_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -696,7 +697,7 @@ vint8m4_t test_vmul_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_ } // 
CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -706,7 +707,7 @@ vint8m4_t test_vmul_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -716,7 +717,7 @@ vint8m8_t test_vmul_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -726,7 +727,7 @@ vint8m8_t test_vmul_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t v } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -736,7 +737,7 @@ vint16m1_t test_vmul_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -746,7 +747,7 @@ vint16m1_t test_vmul_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -756,7 +757,7 @@ vint16m2_t test_vmul_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i16m2_m -// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -766,7 +767,7 @@ vint16m2_t test_vmul_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -776,7 +777,7 @@ vint16m4_t test_vmul_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -786,7 +787,7 @@ vint16m4_t test_vmul_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -796,7 +797,7 @@ vint16m8_t test_vmul_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -806,7 +807,7 @@ vint16m8_t test_vmul_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -816,7 +817,7 @@ vint32m1_t test_vmul_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], 
i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -826,7 +827,7 @@ vint32m1_t test_vmul_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -836,7 +837,7 @@ vint32m2_t test_vmul_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -846,7 +847,7 @@ vint32m2_t test_vmul_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -856,7 +857,7 @@ vint32m4_t test_vmul_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -866,7 +867,7 @@ vint32m4_t test_vmul_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -876,7 +877,7 @@ vint32m8_t test_vmul_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -886,7 +887,7 @@ vint32m8_t test_vmul_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -896,7 +897,7 @@ vint64m1_t test_vmul_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -906,7 +907,7 @@ vint64m1_t test_vmul_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -916,7 +917,7 @@ vint64m2_t test_vmul_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -926,7 +927,7 @@ vint64m2_t test_vmul_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -936,7 +937,7 @@ vint64m4_t test_vmul_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 
noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -946,7 +947,7 @@ vint64m4_t test_vmul_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -956,7 +957,7 @@ vint64m8_t test_vmul_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -966,7 +967,7 @@ vint64m8_t test_vmul_vx_i64m8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -976,7 +977,7 @@ vuint8m1_t test_vmul_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -986,7 +987,7 @@ vuint8m1_t test_vmul_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -996,7 +997,7 @@ vuint8m2_t test_vmul_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1006,7 +1007,7 @@ vuint8m2_t test_vmul_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1016,7 +1017,7 @@ vuint8m4_t test_vmul_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1026,7 +1027,7 @@ vuint8m4_t test_vmul_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1036,7 +1037,7 @@ vuint8m8_t test_vmul_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1046,7 +1047,7 @@ vuint8m8_t test_vmul_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1056,7 +1057,7 @@ vuint16m1_t test_vmul_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t op } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmul.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1066,7 +1067,7 @@ vuint16m1_t test_vmul_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1076,7 +1077,7 @@ vuint16m2_t test_vmul_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1086,7 +1087,7 @@ vuint16m2_t test_vmul_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1096,7 +1097,7 @@ vuint16m4_t test_vmul_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1106,7 +1107,7 @@ vuint16m4_t test_vmul_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1116,7 +1117,7 @@ vuint16m8_t test_vmul_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmul.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1126,7 +1127,7 @@ vuint16m8_t test_vmul_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1136,7 +1137,7 @@ vuint32m1_t test_vmul_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1146,7 +1147,7 @@ vuint32m1_t test_vmul_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1156,7 +1157,7 @@ vuint32m2_t test_vmul_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1166,7 +1167,7 @@ vuint32m2_t test_vmul_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1176,7 +1177,7 @@ vuint32m4_t test_vmul_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmul.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1186,7 +1187,7 @@ vuint32m4_t test_vmul_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1196,7 +1197,7 @@ vuint32m8_t test_vmul_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1206,7 +1207,7 @@ vuint32m8_t test_vmul_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1216,7 +1217,7 @@ vuint64m1_t test_vmul_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1226,7 +1227,7 @@ vuint64m1_t test_vmul_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1236,7 +1237,7 @@ vuint64m2_t test_vmul_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 
[[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1246,7 +1247,7 @@ vuint64m2_t test_vmul_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1256,7 +1257,7 @@ vuint64m4_t test_vmul_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1266,7 +1267,7 @@ vuint64m4_t test_vmul_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmul_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -1276,7 +1277,7 @@ vuint64m8_t test_vmul_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmul_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmul.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulh.c index d8dd16b178ef08..f449373451f992 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulh.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulh.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vmulh_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmulh.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vmulh_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vmulh_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vmulh_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vmulh_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vmulh_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vmulh_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -85,7 +86,7 @@ vint8m8_t test_vmulh_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { return __riscv_th_vmulh_vx_i8m8(op1, op2, vl); } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -95,7 +96,7 @@ vint16m1_t test_vmulh_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -105,7 +106,7 @@ vint16m1_t test_vmulh_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -115,7 +116,7 @@ vint16m2_t test_vmulh_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -125,7 +126,7 @@ vint16m2_t test_vmulh_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -135,7 +136,7 @@ vint16m4_t test_vmulh_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -145,7 +146,7 @@ vint16m4_t test_vmulh_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -155,7 +156,7 @@ vint16m8_t test_vmulh_vv_i16m8(vint16m8_t op1, vint16m8_t op2, 
size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -165,7 +166,7 @@ vint16m8_t test_vmulh_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -175,7 +176,7 @@ vint32m1_t test_vmulh_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -185,7 +186,7 @@ vint32m1_t test_vmulh_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -195,7 +196,7 @@ vint32m2_t test_vmulh_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -205,7 +206,7 @@ vint32m2_t test_vmulh_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -215,7 +216,7 @@ vint32m4_t test_vmulh_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -225,7 +226,7 @@ vint32m4_t test_vmulh_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -235,7 +236,7 @@ vint32m8_t test_vmulh_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -245,7 +246,7 @@ vint32m8_t test_vmulh_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -255,7 +256,7 @@ vint64m1_t test_vmulh_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -265,7 +266,7 @@ vint64m1_t test_vmulh_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -275,7 +276,7 @@ vint64m2_t test_vmulh_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -285,7 +286,7 @@ vint64m2_t test_vmulh_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -295,7 +296,7 @@ vint64m4_t test_vmulh_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -305,7 +306,7 @@ vint64m4_t test_vmulh_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -315,7 +316,7 @@ vint64m8_t test_vmulh_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -325,7 +326,7 @@ vint64m8_t test_vmulh_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -335,7 +336,7 @@ vint8m1_t test_vmulh_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -345,7 +346,7 @@ vint8m1_t test_vmulh_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], 
i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -355,7 +356,7 @@ vint8m2_t test_vmulh_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -365,7 +366,7 @@ vint8m2_t test_vmulh_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -375,7 +376,7 @@ vint8m4_t test_vmulh_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -385,7 +386,7 @@ vint8m4_t test_vmulh_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -395,7 +396,7 @@ vint8m8_t test_vmulh_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -405,7 +406,7 @@ vint8m8_t test_vmulh_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -415,7 
+416,7 @@ vint16m1_t test_vmulh_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -425,7 +426,7 @@ vint16m1_t test_vmulh_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -435,7 +436,7 @@ vint16m2_t test_vmulh_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -445,7 +446,7 @@ vint16m2_t test_vmulh_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -455,7 +456,7 @@ vint16m4_t test_vmulh_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -465,7 +466,7 @@ vint16m4_t test_vmulh_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -475,7 +476,7 @@ vint16m8_t 
test_vmulh_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -485,7 +486,7 @@ vint16m8_t test_vmulh_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -495,7 +496,7 @@ vint32m1_t test_vmulh_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -505,7 +506,7 @@ vint32m1_t test_vmulh_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -515,7 +516,7 @@ vint32m2_t test_vmulh_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -525,7 +526,7 @@ vint32m2_t test_vmulh_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -535,7 +536,7 @@ vint32m4_t test_vmulh_vv_i32m4_m(vbool8_t mask, 
vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -545,7 +546,7 @@ vint32m4_t test_vmulh_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -555,7 +556,7 @@ vint32m8_t test_vmulh_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -565,7 +566,7 @@ vint32m8_t test_vmulh_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -575,7 +576,7 @@ vint64m1_t test_vmulh_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -585,7 +586,7 @@ vint64m1_t test_vmulh_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -595,7 +596,7 @@ vint64m2_t test_vmulh_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, } // 
CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -605,7 +606,7 @@ vint64m2_t test_vmulh_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -615,7 +616,7 @@ vint64m4_t test_vmulh_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -625,7 +626,7 @@ vint64m4_t test_vmulh_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -635,7 +636,7 @@ vint64m8_t test_vmulh_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulh_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulh.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulhsu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulhsu.c index 036adbaa6f24b6..bd8d507660a79c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulhsu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulhsu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t 
test_vmulhsu_vv_i8m1(vint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vmulhsu_vx_i8m1(vint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vmulhsu_vv_i8m2(vint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vmulhsu_vx_i8m2(vint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vmulhsu_vv_i8m4(vint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vmulhsu_vx_i8m4(vint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vmulhsu_vv_i8m8(vint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vmulhsu_vx_i8m8(vint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vmulhsu_vv_i16m1(vint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vmulhsu_vx_i16m1(vint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vmulhsu_vv_i16m2(vint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vmulhsu_vx_i16m2(vint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vmulhsu_vv_i16m4(vint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vmulhsu_vx_i16m4(vint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_vmulhsu_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vmulhsu_vv_i16m8(vint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vmulhsu_vx_i16m8(vint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vmulhsu_vv_i32m1(vint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vmulhsu_vx_i32m1(vint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vmulhsu_vv_i32m2(vint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vmulhsu_vx_i32m2(vint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmulhsu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vmulhsu_vv_i32m4(vint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vmulhsu_vx_i32m4(vint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vmulhsu_vv_i32m8(vint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vmulhsu_vx_i32m8(vint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vmulhsu_vv_i64m1(vint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vmulhsu_vx_i64m1(vint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vmulhsu_vv_i64m2(vint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 
noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vmulhsu_vx_i64m2(vint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vmulhsu_vv_i64m4(vint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vmulhsu_vx_i64m4(vint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vmulhsu_vv_i64m8(vint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vmulhsu_vx_i64m8(vint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vmulhsu_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vuint8m1_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmulhsu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vmulhsu_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m2_t test_vmulhsu_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vuint8m2_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m2_t test_vmulhsu_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint8m4_t test_vmulhsu_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vuint8m4_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint8m4_t test_vmulhsu_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint8m8_t test_vmulhsu_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vuint8m8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmulhsu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint8m8_t test_vmulhsu_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, uint8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint16m1_t test_vmulhsu_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vuint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint16m1_t test_vmulhsu_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint16m2_t test_vmulhsu_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vuint16m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint16m2_t test_vmulhsu_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint16m4_t test_vmulhsu_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vuint16m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmulhsu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint16m4_t test_vmulhsu_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint16m8_t test_vmulhsu_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vuint16m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint16m8_t test_vmulhsu_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vint32m1_t test_vmulhsu_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vuint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vint32m1_t test_vmulhsu_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vint32m2_t test_vmulhsu_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vuint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmulhsu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vint32m2_t test_vmulhsu_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vint32m4_t test_vmulhsu_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vuint32m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vint32m4_t test_vmulhsu_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vint32m8_t test_vmulhsu_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vuint32m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vint32m8_t test_vmulhsu_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vint64m1_t test_vmulhsu_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vuint64m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmulhsu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vint64m1_t test_vmulhsu_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vint64m2_t test_vmulhsu_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vuint64m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vint64m2_t test_vmulhsu_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vint64m4_t test_vmulhsu_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vuint64m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vint64m4_t test_vmulhsu_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, uint64_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vint64m8_t test_vmulhsu_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vuint64m8_t op } // CHECK-RV64-LABEL: define dso_local @test_vmulhsu_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhsu.mask.nxv8i64.i64.i64( poison, [[OP1]], 
i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulhu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulhu.c index 49ea7a08745df4..2bfea65a136e5c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulhu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-mul/thead/vmulhu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vmulhu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vmulhu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vmulhu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vmulhu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vmulhu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vmulhu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) 
{ } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m8_t test_vmulhu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vmulhu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vmulhu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vmulhu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vmulhu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vmulhu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmulhu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vmulhu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vmulhu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vmulhu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vmulhu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vmulhu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m1_t test_vmulhu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vmulhu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u32m2 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t test_vmulhu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vmulhu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vmulhu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vmulhu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vmulhu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vmulhu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv1i64.i64.i64( poison, [[OP1]], i64 
[[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vmulhu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vmulhu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vmulhu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vmulhu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vmulhu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vmulhu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m8_t test_vmulhu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vmulhu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vmulhu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vmulhu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vmulhu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vmulhu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vmulhu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vmulhu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vmulhu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vmulhu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vmulhu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vmulhu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vmulhu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vmulhu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vmulhu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vmulhu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vmulhu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vmulhu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vmulhu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vmulhu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vmulhu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vmulhu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vmulhu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vmulhu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vmulhu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vmulhu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vmulhu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vmulhu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vmulhu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vmulhu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vmulhu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv8i64.nxv8i64.i64( poison, 
[[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vmulhu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vmulhu_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmulhu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vsadd.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vsadd.c index ba40fed5b64f16..4fb9e4b8108d0a 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vsadd.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vsadd.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vsadd_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vsadd_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vsadd_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vsadd_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t 
test_vsadd_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vsadd_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vsadd_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vsadd_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vsadd_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vsadd_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vsadd_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vsadd_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vsadd_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vsadd_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vsadd_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vsadd_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vsadd_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vsadd_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vsadd_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vsadd_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vsadd_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vsadd_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vsadd_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vsadd_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ 
vint64m1_t test_vsadd_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vsadd_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vsadd_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vsadd_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vsadd_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vsadd_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vsadd_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vsadd_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vsadd_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vsadd_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m2_t test_vsadd_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m2_t test_vsadd_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint8m4_t test_vsadd_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 
[[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint8m4_t test_vsadd_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint8m8_t test_vsadd_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint8m8_t test_vsadd_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint16m1_t test_vsadd_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint16m1_t test_vsadd_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint16m2_t test_vsadd_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint16m2_t test_vsadd_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint16m4_t test_vsadd_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint16m4_t test_vsadd_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint16m8_t test_vsadd_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint16m8_t test_vsadd_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vint32m1_t test_vsadd_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-506,7 +507,7 @@ vint32m1_t test_vsadd_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vint32m2_t test_vsadd_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vint32m2_t test_vsadd_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vint32m4_t test_vsadd_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vint32m4_t test_vsadd_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vint32m8_t test_vsadd_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vint32m8_t 
test_vsadd_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vint64m1_t test_vsadd_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vint64m1_t test_vsadd_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vint64m2_t test_vsadd_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vint64m2_t test_vsadd_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vint64m4_t test_vsadd_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vint64m4_t test_vsadd_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, si } // 
CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vint64m8_t test_vsadd_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vsaddu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vsaddu.c index 5a688a7e780455..5704bb2cc5a6e1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vsaddu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vsaddu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vsaddu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vsaddu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vsaddu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vsaddu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_vsaddu_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vsaddu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vsaddu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m8_t test_vsaddu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vsaddu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vsaddu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vsaddu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], 
i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vsaddu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vsaddu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vsaddu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vsaddu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vsaddu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vsaddu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vsaddu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m1_t test_vsaddu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vsaddu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t test_vsaddu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vsaddu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vsaddu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vsaddu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] 
@@ -246,7 +247,7 @@ vuint32m8_t test_vsaddu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vsaddu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vsaddu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vsaddu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vsaddu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vsaddu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vsaddu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vsaddu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m8_t test_vsaddu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vsaddu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vsaddu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vsaddu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vsaddu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], 
[[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vsaddu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vsaddu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vsaddu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vsaddu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vsaddu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vsaddu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vsaddu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vsaddu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vsaddu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vsaddu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vsaddu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vsaddu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vsaddu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vsaddu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vsaddu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vsaddu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vsaddu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vsaddu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 
@@ vuint32m8_t test_vsaddu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vsaddu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vsaddu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vsaddu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vsaddu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vsaddu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vsaddu_vv_u64m4_m(vbool16_t mask, 
vuint64m4_t op1, vuint64m4_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vsaddu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vsaddu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vssub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vssub.c index c2d59987f0c64c..2827afb62f7b49 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vssub.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vssub.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vssub_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vssub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vssub_vv_i8m2(vint8m2_t op1, 
vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vssub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vssub_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vssub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vssub_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vssub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vssub_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vssub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vssub_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vssub_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vssub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vssub_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vssub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vssub_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vssub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vssub_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vssub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vssub_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vssub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vssub_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vssub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t 
test_vssub_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vssub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vssub_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vssub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vssub_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vssub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vssub_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vssub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vssub_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vssub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vssub_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vssub_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m2_t test_vssub_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m2_t 
test_vssub_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint8m4_t test_vssub_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint8m4_t test_vssub_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint8m8_t test_vssub_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint8m8_t test_vssub_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint16m1_t test_vssub_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint16m1_t test_vssub_vx_i16m1_m(vbool16_t mask, vint16m1_t 
op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint16m2_t test_vssub_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint16m2_t test_vssub_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint16m4_t test_vssub_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint16m4_t test_vssub_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint16m8_t test_vssub_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint16m8_t test_vssub_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, siz } // 
CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vint32m1_t test_vssub_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vint32m1_t test_vssub_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vint32m2_t test_vssub_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vint32m2_t test_vssub_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vint32m4_t test_vssub_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vint32m4_t test_vssub_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local 
@test_vssub_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vint32m8_t test_vssub_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vint32m8_t test_vssub_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vint64m1_t test_vssub_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vint64m1_t test_vssub_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vint64m2_t test_vssub_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vint64m2_t test_vssub_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vint64m4_t test_vssub_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vint64m4_t test_vssub_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vint64m8_t test_vssub_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vssubu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vssubu.c index 35d7a8d5294986..c4a063b80a2261 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vssubu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/thead/vssubu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vssubu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vssubu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_vssubu_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vssubu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vssubu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vssubu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vssubu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m8_t test_vssubu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vssubu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vssubu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vssubu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vssubu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vssubu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vssubu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vssubu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vssubu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vssubu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vssubu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m1_t test_vssubu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vssubu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t test_vssubu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vssubu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ 
vuint32m4_t test_vssubu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vssubu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vssubu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vssubu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vssubu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vssubu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vssubu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vssubu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vssubu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vssubu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m8_t test_vssubu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vssubu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vssubu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t 
test_vssubu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vssubu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vssubu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vssubu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vssubu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vssubu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vssubu_vv_u16m1_m(vbool16_t mask, 
vuint16m1_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vssubu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vssubu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vssubu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vssubu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vssubu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vssubu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, 
vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vssubu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vssubu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vssubu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vssubu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vssubu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vssubu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // 
CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vssubu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vssubu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vssubu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vssubu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vssubu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vssubu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t } // CHECK-RV64-LABEL: define dso_local 
@test_vssubu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vssubu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vssubu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vssubu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vssubu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vsadd.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vsadd.c index b4aec71fce579e..e81fcef3a2ba9a 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vsadd.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vsadd.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 
+17,7 @@ vint8m1_t test_vsadd_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vsadd_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vsadd_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vsadd_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vsadd_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vsadd_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vsadd_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vsadd_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vsadd_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vsadd_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vsadd_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vsadd_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vsadd_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vsadd_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vsadd_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vsadd_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vsadd_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vsadd_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vsadd_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vsadd_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t 
test_vsadd_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vsadd_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vsadd_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vsadd_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vsadd_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vsadd_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vsadd_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vsadd_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vsadd_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vsadd_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vsadd_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vsadd_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vsadd_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vsadd_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local 
@test_vsadd_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m2_t test_vsadd_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m2_t test_vsadd_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint8m4_t test_vsadd_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint8m4_t test_vsadd_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint8m8_t test_vsadd_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint8m8_t test_vsadd_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint16m1_t test_vsadd_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint16m1_t test_vsadd_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint16m2_t test_vsadd_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint16m2_t test_vsadd_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint16m4_t test_vsadd_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint16m4_t test_vsadd_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint16m8_t test_vsadd_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint16m8_t test_vsadd_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vint32m1_t test_vsadd_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vint32m1_t test_vsadd_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vint32m2_t test_vsadd_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vint32m2_t test_vsadd_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vint32m4_t test_vsadd_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vint32m4_t test_vsadd_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vint32m8_t test_vsadd_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vint32m8_t test_vsadd_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vint64m1_t test_vsadd_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vint64m1_t test_vsadd_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -596,7 +597,7 @@ vint64m2_t test_vsadd_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2,
}
// CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m2_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -606,7 +607,7 @@ vint64m2_t test_vsadd_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, si
}
// CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m4_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -616,7 +617,7 @@ vint64m4_t test_vsadd_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2,
}
// CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m4_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -626,7 +627,7 @@ vint64m4_t test_vsadd_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, si
}
// CHECK-RV64-LABEL: define dso_local @test_vsadd_vv_i64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -636,7 +637,7 @@ vint64m8_t test_vsadd_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2,
}
// CHECK-RV64-LABEL: define dso_local @test_vsadd_vx_i64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsadd.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vsaddu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vsaddu.c
index 9b2c7c2abd6a0f..b8695689a42518 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vsaddu.c
+++
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vsaddu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vsaddu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vsaddu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vsaddu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vsaddu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vsaddu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vsaddu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m8_t test_vsaddu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vsaddu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vsaddu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vsaddu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vsaddu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vsaddu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vsaddu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vsaddu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vsaddu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vsaddu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vsaddu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m1_t test_vsaddu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vsaddu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t 
test_vsaddu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vsaddu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vsaddu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vsaddu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vsaddu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vsaddu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vsaddu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vsaddu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vsaddu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vsaddu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vsaddu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vsaddu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m8_t test_vsaddu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vsaddu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local 
@test_vsaddu_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vsaddu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vsaddu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vsaddu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vsaddu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vsaddu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vsaddu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u8m8_m -// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vsaddu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vsaddu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vsaddu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vsaddu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vsaddu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vsaddu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], 
i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vsaddu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vsaddu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vsaddu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vsaddu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vsaddu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vsaddu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vsaddu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vsaddu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vsaddu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vsaddu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vsaddu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vsaddu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vsaddu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vsaddu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vsaddu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vsaddu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vsaddu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vsaddu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vsaddu_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsaddu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vssub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vssub.c index 75207b1da84735..b6688705485ff7 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vssub.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vssub.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vssub_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vssub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vssub_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vssub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vssub_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vssub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vssub_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vssub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vssub_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vssub_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vssub_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vssub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vssub_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vssub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vssub_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vssub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vssub_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vssub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t 
test_vssub_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vssub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vssub_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vssub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vssub_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vssub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vssub_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vssub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vssub_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vssub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vssub_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vssub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vssub_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vssub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint8m1_t test_vssub_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint8m1_t test_vssub_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint8m2_t test_vssub_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint8m2_t test_vssub_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint8m4_t test_vssub_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint8m4_t test_vssub_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vint8m8_t test_vssub_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vint8m8_t test_vssub_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vint16m1_t test_vssub_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vint16m1_t test_vssub_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vint16m2_t test_vssub_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vint16m2_t test_vssub_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vint16m4_t test_vssub_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vint16m4_t test_vssub_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vint16m8_t test_vssub_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vint16m8_t test_vssub_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vint32m1_t test_vssub_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vint32m1_t test_vssub_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vint32m2_t test_vssub_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vint32m2_t test_vssub_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vint32m4_t test_vssub_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vint32m4_t test_vssub_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vint32m8_t test_vssub_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vint32m8_t test_vssub_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vint64m1_t test_vssub_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vint64m1_t test_vssub_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vint64m2_t test_vssub_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vint64m2_t test_vssub_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vint64m4_t test_vssub_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vint64m4_t test_vssub_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vssub_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vssub.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vint64m8_t test_vssub_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssub_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssub.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vssubu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vssubu.c index dc53feb5498bb5..8d5317535c6f5a 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vssubu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-saturating-add/wrappers/vssubu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vssubu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vssubu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vssubu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vssubu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv32i8.nxv32i8.i64( 
poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vssubu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vssubu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m8_t test_vssubu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vssubu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vssubu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vssubu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vssubu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vssubu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vssubu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vssubu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vssubu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vssubu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vssubu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 
@@ vuint32m1_t test_vssubu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vssubu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t test_vssubu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vssubu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vssubu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vssubu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vssubu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vssubu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vssubu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vssubu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vssubu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vssubu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vssubu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vssubu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m8 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m8_t test_vssubu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vssubu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vssubu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vssubu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vssubu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vssubu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vssubu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vssubu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u8m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vssubu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vssubu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vssubu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vssubu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vssubu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vssubu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vssubu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vssubu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vssubu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vssubu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vssubu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vssubu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vssubu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vssubu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vssubu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vssubu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vssubu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vssubu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m1_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vssubu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vssubu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vssubu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vssubu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vssubu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vssubu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t o } // CHECK-RV64-LABEL: define dso_local @test_vssubu_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vssubu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsll.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsll.c index c60bd876d82cdf..f30069decbba2c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsll.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsll.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vsll_vv_i8m1(vint8m1_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vsll_vx_i8m1(vint8m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vsll_vv_i8m2(vint8m2_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vsll_vx_i8m2(vint8m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vsll_vv_i8m4(vint8m4_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vsll_vx_i8m4(vint8m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vsll_vv_i8m8(vint8m8_t op1, vuint8m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv64i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vsll_vx_i8m8(vint8m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vsll_vv_i16m1(vint16m1_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vsll_vx_i16m1(vint16m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vsll_vv_i16m2(vint16m2_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vsll_vx_i16m2(vint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vsll_vv_i16m4(vint16m4_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vsll_vx_i16m4(vint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vsll_vv_i16m8(vint16m8_t op1, vuint16m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vsll_vx_i16m8(vint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vsll_vv_i32m1(vint32m1_t op1, vuint32m1_t shift, 
size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vsll_vx_i32m1(vint32m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vsll_vv_i32m2(vint32m2_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vsll_vx_i32m2(vint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vsll_vv_i32m4(vint32m4_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vsll_vx_i32m4(vint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vsll_vv_i32m8(vint32m8_t op1, vuint32m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vsll.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vsll_vx_i32m8(vint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vsll_vv_i64m1(vint64m1_t op1, vuint64m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv1i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vsll_vx_i64m1(vint64m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vsll_vv_i64m2(vint64m2_t op1, vuint64m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vsll_vx_i64m2(vint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vsll_vv_i64m4(vint64m4_t op1, vuint64m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vsll_vx_i64m4(vint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vsll_vv_i64m8(vint64m8_t op1, vuint64m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vsll_vx_i64m8(vint64m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vsll_vv_u8m1(vuint8m1_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vsll_vx_u8m1(vuint8m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vsll_vv_u8m2(vuint8m2_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vsll_vx_u8m2(vuint8m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vsll_vv_u8m4(vuint8m4_t op1, vuint8m4_t shift, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vsll_vx_u8m4(vuint8m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vsll_vv_u8m8(vuint8m8_t op1, vuint8m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv64i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vsll_vx_u8m8(vuint8m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vsll_vv_u16m1(vuint16m1_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vsll_vx_u16m1(vuint16m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vsll_vv_u16m2(vuint16m2_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i16.i64.i64( 
poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vsll_vx_u16m2(vuint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vsll_vv_u16m4(vuint16m4_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vsll_vx_u16m4(vuint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vsll_vv_u16m8(vuint16m8_t op1, vuint16m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -486,7 +487,7 @@ vuint16m8_t test_vsll_vx_u16m8(vuint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vsll_vv_u32m1(vuint32m1_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vsll_vx_u32m1(vuint32m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: 
( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vsll_vv_u32m2(vuint32m2_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vsll_vx_u32m2(vuint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vsll_vv_u32m4(vuint32m4_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vsll_vx_u32m4(vuint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vsll_vv_u32m8(vuint32m8_t op1, vuint32m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vsll_vx_u32m8(vuint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vsll_vv_u64m1(vuint64m1_t op1, vuint64m1_t shift, size_t vl) { } 
// CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv1i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vsll_vx_u64m1(vuint64m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vsll_vv_u64m2(vuint64m2_t op1, vuint64m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vsll_vx_u64m2(vuint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vsll_vv_u64m4(vuint64m4_t op1, vuint64m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vsll_vx_u64m4(vuint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vsll_vv_u64m8(vuint64m8_t op1, vuint64m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vsll.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsra.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsra.c index f2acd8f1813ddc..b6b1b0403eaf53 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsra.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsra.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vsra_vv_i8m1(vint8m1_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vsra_vx_i8m1(vint8m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vsra_vv_i8m2(vint8m2_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vsra_vx_i8m2(vint8m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vsra_vv_i8m4(vint8m4_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv32i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vsra_vx_i8m4(vint8m4_t op1, 
size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vsra_vv_i8m8(vint8m8_t op1, vuint8m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv64i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vsra_vx_i8m8(vint8m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vsra_vv_i16m1(vint16m1_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vsra_vx_i16m1(vint16m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vsra_vv_i16m2(vint16m2_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vsra_vx_i16m2(vint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i16.nxv16i16.i64( 
poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vsra_vv_i16m4(vint16m4_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vsra_vx_i16m4(vint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vsra_vv_i16m8(vint16m8_t op1, vuint16m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vsra_vx_i16m8(vint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vsra_vv_i32m1(vint32m1_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv2i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vsra_vx_i32m1(vint32m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vsra_vv_i32m2(vint32m2_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vsra_vx_i32m2(vint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vsra_vv_i32m4(vint32m4_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vsra_vx_i32m4(vint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vsra_vv_i32m8(vint32m8_t op1, vuint32m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vsra_vx_i32m8(vint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vsra_vv_i64m1(vint64m1_t op1, vuint64m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv1i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vsra_vx_i64m1(vint64m1_t op1, size_t shift, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vsra_vv_i64m2(vint64m2_t op1, vuint64m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vsra_vx_i64m2(vint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vsra_vv_i64m4(vint64m4_t op1, vuint64m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vsra_vx_i64m4(vint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vsra_vv_i64m8(vint64m8_t op1, vuint64m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsrl.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsrl.c index 57e9d4caa32b73..5fab1ddf7ea256 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsrl.c +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/thead/vsrl.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint8m1_t test_vsrl_vv_u8m1(vuint8m1_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint8m1_t test_vsrl_vx_u8m1(vuint8m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint8m2_t test_vsrl_vv_u8m2(vuint8m2_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint8m2_t test_vsrl_vx_u8m2(vuint8m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vsrl_vv_u8m4(vuint8m4_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv32i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vsrl_vx_u8m4(vuint8m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ 
vuint8m8_t test_vsrl_vv_u8m8(vuint8m8_t op1, vuint8m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv64i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vsrl_vx_u8m8(vuint8m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vsrl_vv_u16m1(vuint16m1_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vsrl_vx_u16m1(vuint16m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vsrl_vv_u16m2(vuint16m2_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vsrl_vx_u16m2(vuint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vsrl_vv_u16m4(vuint16m4_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vsrl_vx_u16m4(vuint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vsrl_vv_u16m8(vuint16m8_t op1, vuint16m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vsrl_vx_u16m8(vuint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vsrl_vv_u32m1(vuint32m1_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv2i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m1_t test_vsrl_vx_u32m1(vuint32m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vsrl_vv_u32m2(vuint32m2_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t test_vsrl_vx_u32m2(vuint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u32m4 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vsrl_vv_u32m4(vuint32m4_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vsrl_vx_u32m4(vuint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vsrl_vv_u32m8(vuint32m8_t op1, vuint32m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vsrl_vx_u32m8(vuint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vsrl_vv_u64m1(vuint64m1_t op1, vuint64m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv1i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vsrl_vx_u64m1(vuint64m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vsrl_vv_u64m2(vuint64m2_t op1, vuint64m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vsrl_vx_u64m2(vuint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vsrl_vv_u64m4(vuint64m4_t op1, vuint64m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vsrl_vx_u64m4(vuint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vsrl_vv_u64m8(vuint64m8_t op1, vuint64m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsll.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsll.c index 711668e6b27628..df82c526b3dc4c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsll.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsll.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vsll_vv_i8m1(vint8m1_t op1, vuint8m1_t shift, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vsll_vx_i8m1(vint8m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vsll_vv_i8m2(vint8m2_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vsll_vx_i8m2(vint8m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vsll_vv_i8m4(vint8m4_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vsll_vx_i8m4(vint8m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vsll_vv_i8m8(vint8m8_t op1, vuint8m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv64i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vsll_vx_i8m8(vint8m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vsll_vv_i16m1(vint16m1_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vsll_vx_i16m1(vint16m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vsll_vv_i16m2(vint16m2_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vsll_vx_i16m2(vint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vsll_vv_i16m4(vint16m4_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vsll_vx_i16m4(vint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vsll_vv_i16m8(vint16m8_t op1, vuint16m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vsll_vx_i16m8(vint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m1_t test_vsll_vv_i32m1(vint32m1_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vsll_vx_i32m1(vint32m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vsll_vv_i32m2(vint32m2_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vsll_vx_i32m2(vint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vsll_vv_i32m4(vint32m4_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i32m4 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vsll_vx_i32m4(vint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vsll_vv_i32m8(vint32m8_t op1, vuint32m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vsll_vx_i32m8(vint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vsll_vv_i64m1(vint64m1_t op1, vuint64m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv1i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vsll_vx_i64m1(vint64m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vsll_vv_i64m2(vint64m2_t op1, vuint64m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vsll_vx_i64m2(vint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vsll_vv_i64m4(vint64m4_t op1, vuint64m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vsll_vx_i64m4(vint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m8_t test_vsll_vv_i64m8(vint64m8_t op1, vuint64m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m8_t test_vsll_vx_i64m8(vint64m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint8m1_t test_vsll_vv_u8m1(vuint8m1_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint8m1_t test_vsll_vx_u8m1(vuint8m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint8m2_t test_vsll_vv_u8m2(vuint8m2_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint8m2_t test_vsll_vx_u8m2(vuint8m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint8m4_t test_vsll_vv_u8m4(vuint8m4_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint8m4_t test_vsll_vx_u8m4(vuint8m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -396,7 +397,7 @@ vuint8m8_t test_vsll_vv_u8m8(vuint8m8_t op1, vuint8m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv64i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -406,7 +407,7 @@ vuint8m8_t test_vsll_vx_u8m8(vuint8m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -416,7 +417,7 @@ vuint16m1_t test_vsll_vv_u16m1(vuint16m1_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 
noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -426,7 +427,7 @@ vuint16m1_t test_vsll_vx_u16m1(vuint16m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -436,7 +437,7 @@ vuint16m2_t test_vsll_vv_u16m2(vuint16m2_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -446,7 +447,7 @@ vuint16m2_t test_vsll_vx_u16m2(vuint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -456,7 +457,7 @@ vuint16m4_t test_vsll_vv_u16m4(vuint16m4_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -466,7 +467,7 @@ vuint16m4_t test_vsll_vx_u16m4(vuint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -476,7 +477,7 @@ vuint16m8_t test_vsll_vv_u16m8(vuint16m8_t op1, vuint16m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] 
@@ -486,7 +487,7 @@ vuint16m8_t test_vsll_vx_u16m8(vuint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -496,7 +497,7 @@ vuint32m1_t test_vsll_vv_u32m1(vuint32m1_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -506,7 +507,7 @@ vuint32m1_t test_vsll_vx_u32m1(vuint32m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -516,7 +517,7 @@ vuint32m2_t test_vsll_vv_u32m2(vuint32m2_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -526,7 +527,7 @@ vuint32m2_t test_vsll_vx_u32m2(vuint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -536,7 +537,7 @@ vuint32m4_t test_vsll_vv_u32m4(vuint32m4_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -546,7 +547,7 @@ vuint32m4_t test_vsll_vx_u32m4(vuint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -556,7 +557,7 @@ vuint32m8_t test_vsll_vv_u32m8(vuint32m8_t op1, vuint32m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -566,7 +567,7 @@ vuint32m8_t test_vsll_vx_u32m8(vuint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -576,7 +577,7 @@ vuint64m1_t test_vsll_vv_u64m1(vuint64m1_t op1, vuint64m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv1i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -586,7 +587,7 @@ vuint64m1_t test_vsll_vx_u64m1(vuint64m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -596,7 +597,7 @@ vuint64m2_t test_vsll_vv_u64m2(vuint64m2_t op1, vuint64m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -606,7 +607,7 @@ vuint64m2_t test_vsll_vx_u64m2(vuint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -616,7 +617,7 @@ vuint64m4_t test_vsll_vv_u64m4(vuint64m4_t op1, vuint64m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u64m4 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -626,7 +627,7 @@ vuint64m4_t test_vsll_vx_u64m4(vuint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -636,7 +637,7 @@ vuint64m8_t test_vsll_vv_u64m8(vuint64m8_t op1, vuint64m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsll_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsll.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsra.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsra.c index b680aad81464c7..e4099e6d1e3580 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsra.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsra.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1_t test_vsra_vv_i8m1(vint8m1_t op1, vuint8m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i8m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint8m1_t test_vsra_vx_i8m1(vint8m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint8m2_t test_vsra_vv_i8m2(vint8m2_t op1, vuint8m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i8m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 
noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint8m2_t test_vsra_vx_i8m2(vint8m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint8m4_t test_vsra_vv_i8m4(vint8m4_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv32i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint8m4_t test_vsra_vx_i8m4(vint8m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint8m8_t test_vsra_vv_i8m8(vint8m8_t op1, vuint8m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv64i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint8m8_t test_vsra_vx_i8m8(vint8m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m1_t test_vsra_vv_i16m1(vint16m1_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m1_t test_vsra_vx_i16m1(vint16m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i16m2 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m2_t test_vsra_vv_i16m2(vint16m2_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m2_t test_vsra_vx_i16m2(vint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m4_t test_vsra_vv_i16m4(vint16m4_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint16m4_t test_vsra_vx_i16m4(vint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint16m8_t test_vsra_vv_i16m8(vint16m8_t op1, vuint16m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint16m8_t test_vsra_vx_i16m8(vint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] 
@@ -176,7 +177,7 @@ vint32m1_t test_vsra_vv_i32m1(vint32m1_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv2i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m1_t test_vsra_vx_i32m1(vint32m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m2_t test_vsra_vv_i32m2(vint32m2_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m2_t test_vsra_vx_i32m2(vint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vsra_vv_i32m4(vint32m4_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vsra_vx_i32m4(vint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vsra_vv_i32m8(vint32m8_t op1, vuint32m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] 
{ // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vsra_vx_i32m8(vint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint64m1_t test_vsra_vv_i64m1(vint64m1_t op1, vuint64m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv1i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint64m1_t test_vsra_vx_i64m1(vint64m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m2_t test_vsra_vv_i64m2(vint64m2_t op1, vuint64m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vsra_vx_i64m2(vint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m4_t test_vsra_vv_i64m4(vint64m4_t op1, vuint64m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m4_t test_vsra_vx_i64m4(vint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsra_vv_i64m8 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -316,7 +317,7 @@ vint64m8_t test_vsra_vv_i64m8(vint64m8_t op1, vuint64m8_t shift, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vsra_vx_i64m8
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsra.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsrl.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsrl.c
index f1bbbeb18516c6..b2b4f992545b09 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsrl.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-witdth-bit-shift/wrappers/vsrl.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vuint8m1_t test_vsrl_vv_u8m1(vuint8m1_t op1, vuint8m1_t shift, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u8m1
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7 +27,7 @@ vuint8m1_t test_vsrl_vx_u8m1(vuint8m1_t op1, size_t shift, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u8m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -36,7 +37,7 @@ vuint8m2_t test_vsrl_vv_u8m2(vuint8m2_t op1, vuint8m2_t shift, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u8m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -46,7 +47,7 @@ vuint8m2_t test_vsrl_vx_u8m2(vuint8m2_t op1, size_t shift, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u8m4
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]])
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint8m4_t test_vsrl_vv_u8m4(vuint8m4_t op1, vuint8m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u8m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv32i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint8m4_t test_vsrl_vx_u8m4(vuint8m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint8m8_t test_vsrl_vv_u8m8(vuint8m8_t op1, vuint8m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u8m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv64i8.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint8m8_t test_vsrl_vx_u8m8(vuint8m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m1_t test_vsrl_vv_u16m1(vuint16m1_t op1, vuint16m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u16m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m1_t test_vsrl_vx_u16m1(vuint16m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m2_t test_vsrl_vv_u16m2(vuint16m2_t op1, vuint16m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u16m2 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m2_t test_vsrl_vx_u16m2(vuint16m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m4_t test_vsrl_vv_u16m4(vuint16m4_t op1, vuint16m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint16m4_t test_vsrl_vx_u16m4(vuint16m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint16m8_t test_vsrl_vv_u16m8(vuint16m8_t op1, vuint16m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv32i16.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint16m8_t test_vsrl_vx_u16m8(vuint16m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m1_t test_vsrl_vv_u32m1(vuint32m1_t op1, vuint32m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv2i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m1_t test_vsrl_vx_u32m1(vuint32m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m2_t test_vsrl_vv_u32m2(vuint32m2_t op1, vuint32m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m2_t test_vsrl_vx_u32m2(vuint32m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vsrl_vv_u32m4(vuint32m4_t op1, vuint32m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vsrl_vx_u32m4(vuint32m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vsrl_vv_u32m8(vuint32m8_t op1, vuint32m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv16i32.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vsrl_vx_u32m8(vuint32m8_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint64m1_t test_vsrl_vv_u64m1(vuint64m1_t op1, vuint64m1_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv1i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint64m1_t test_vsrl_vx_u64m1(vuint64m1_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m2_t test_vsrl_vv_u64m2(vuint64m2_t op1, vuint64m2_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv2i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vsrl_vx_u64m2(vuint64m2_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m4_t test_vsrl_vv_u64m4(vuint64m4_t op1, vuint64m4_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv4i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m4_t test_vsrl_vx_u64m4(vuint64m4_t op1, size_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vsrl_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[SHIFT]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m8_t test_vsrl_vv_u64m8(vuint64m8_t op1, vuint64m8_t shift, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vsrl_vx_u64m8
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[SHIFT:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsrl.nxv8i64.i64.i64( poison, [[OP1]], i64 [[SHIFT]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwadd.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwadd.c
index 10c4f08caabba6..5053771df16c9b 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwadd.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwadd.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vint16m1_t test_vwadd_wx_i16m1(vint16m1_t op1, int8_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i16m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7 +27,7 @@ vint16m2_t test_vwadd_vv_i16m2(vint8m1_t op1, vint8m1_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i16m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -36,7 +37,7 @@ vint16m2_t test_vwadd_vx_i16m2(vint8m1_t op1, int8_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i16m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i16.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -46,7 +47,7 @@ vint16m2_t test_vwadd_wv_i16m2(vint16m2_t op1, vint8m1_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i16m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -56,7 +57,7 @@ vint16m2_t test_vwadd_wx_i16m2(vint16m2_t op1, int8_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i16m4
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m4_t test_vwadd_vv_i16m4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_vwadd_vx_i16m4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m4_t test_vwadd_wv_i16m4(vint16m4_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv16i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m4_t test_vwadd_wx_i16m4(vint16m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m8_t test_vwadd_vv_i16m8(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m8_t test_vwadd_vx_i16m8(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m8_t test_vwadd_wv_i16m8(vint16m8_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_vwadd_wx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv32i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m8_t test_vwadd_wx_i16m8(vint16m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv2i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint32m1_t test_vwadd_wx_i32m1(vint32m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint32m2_t test_vwadd_vv_i32m2(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint32m2_t test_vwadd_vx_i32m2(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m2_t test_vwadd_wv_i32m2(vint32m2_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv4i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m2_t test_vwadd_wx_i32m2(vint32m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vwadd.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m4_t test_vwadd_vv_i32m4(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m4_t test_vwadd_vx_i32m4(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vwadd_wv_i32m4(vint32m4_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vwadd_wx_i32m4(vint32m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vwadd_vv_i32m8(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vwadd_vx_i32m8(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m8_t test_vwadd_wv_i32m8(vint32m8_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i32m8 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv16i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m8_t test_vwadd_wx_i32m8(vint32m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv1i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m1_t test_vwadd_wx_i64m1(vint64m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vwadd_vv_i64m2(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m2_t test_vwadd_vx_i64m2(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m2_t test_vwadd_wv_i64m2(vint64m2_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv2i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m2_t test_vwadd_wx_i64m2(vint64m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv4i64.nxv4i32.nxv4i32.i64( poison, 
[[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m4_t test_vwadd_vv_i64m4(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint64m4_t test_vwadd_vx_i64m4(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint64m4_t test_vwadd_wv_i64m4(vint64m4_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv4i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint64m4_t test_vwadd_wx_i64m4(vint64m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint64m8_t test_vwadd_vv_i64m8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint64m8_t test_vwadd_vx_i64m8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint64m8_t test_vwadd_wv_i64m8(vint64m8_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwaddu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwaddu.c
index 81963c90346365..27833bbcd9eb1f 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwaddu.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwaddu.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vuint16m1_t test_vwaddu_wx_u16m1(vuint16m1_t op1, uint8_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u16m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7 +27,7 @@ vuint16m2_t test_vwaddu_vv_u16m2(vuint8m1_t op1, vuint8m1_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u16m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -36,7 +37,7 @@ vuint16m2_t test_vwaddu_vx_u16m2(vuint8m1_t op1, uint8_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u16m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i16.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -46,7 +47,7 @@ vuint16m2_t test_vwaddu_wv_u16m2(vuint16m2_t op1, vuint8m1_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u16m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -56,7 +57,7 @@ vuint16m2_t test_vwaddu_wx_u16m2(vuint16m2_t op1, uint8_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u16m4
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT:
[[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m4_t test_vwaddu_vv_u16m4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_vwaddu_vx_u16m4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m4_t test_vwaddu_wv_u16m4(vuint16m4_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv16i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m4_t test_vwaddu_wx_u16m4(vuint16m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m8_t test_vwaddu_vv_u16m8(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m8_t test_vwaddu_vx_u16m8(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m8_t test_vwaddu_wv_u16m8(vuint16m8_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u16m8 
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv32i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m8_t test_vwaddu_wx_u16m8(vuint16m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv2i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint32m1_t test_vwaddu_wx_u32m1(vuint32m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint32m2_t test_vwaddu_vv_u32m2(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint32m2_t test_vwaddu_vx_u32m2(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m2_t test_vwaddu_wv_u32m2(vuint32m2_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv4i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m2_t test_vwaddu_wx_u32m2(vuint32m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vwaddu.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m4_t test_vwaddu_vv_u32m4(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m4_t test_vwaddu_vx_u32m4(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vwaddu_wv_u32m4(vuint32m4_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vwaddu_wx_u32m4(vuint32m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vwaddu_vv_u32m8(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vwaddu_vx_u32m8(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m8_t test_vwaddu_wv_u32m8(vuint32m8_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vwaddu_wx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv16i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m8_t test_vwaddu_wx_u32m8(vuint32m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv1i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m1_t test_vwaddu_wx_u64m1(vuint64m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vwaddu_vv_u64m2(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m2_t test_vwaddu_vx_u64m2(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_vwaddu_wv_u64m2(vuint64m2_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv2i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m2_t test_vwaddu_wx_u64m2(vuint64m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m4_t test_vwaddu_vv_u64m4(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint64m4_t test_vwaddu_vx_u64m4(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint64m4_t test_vwaddu_wv_u64m4(vuint64m4_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv4i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint64m4_t test_vwaddu_wx_u64m4(vuint64m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint64m8_t test_vwaddu_vv_u64m8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint64m8_t test_vwaddu_vx_u64m8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint64m8_t test_vwaddu_wv_u64m8(vuint64m8_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vwaddu_wx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwsub.c index d1c5c2ab6ce820..06bdbf03445912 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwsub.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwsub.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint16m1_t test_vwsub_wx_i16m1(vint16m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint16m2_t test_vwsub_vv_i16m2(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint16m2_t test_vwsub_vx_i16m2(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i16.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint16m2_t test_vwsub_wv_i16m2(vint16m2_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m2_t test_vwsub_wx_i16m2(vint16m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m4_t test_vwsub_vv_i16m4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_vwsub_vx_i16m4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m4_t test_vwsub_wv_i16m4(vint16m4_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv16i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m4_t test_vwsub_wx_i16m4(vint16m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m8_t test_vwsub_vv_i16m8(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m8_t test_vwsub_vx_i16m8(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m8_t test_vwsub_wv_i16m8(vint16m8_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: 
define dso_local @test_vwsub_wx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv32i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m8_t test_vwsub_wx_i16m8(vint16m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv2i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint32m1_t test_vwsub_wx_i32m1(vint32m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint32m2_t test_vwsub_vv_i32m2(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint32m2_t test_vwsub_vx_i32m2(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m2_t test_vwsub_wv_i32m2(vint32m2_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv4i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m2_t test_vwsub_wx_i32m2(vint32m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vwsub.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m4_t test_vwsub_vv_i32m4(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m4_t test_vwsub_vx_i32m4(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vwsub_wv_i32m4(vint32m4_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vwsub_wx_i32m4(vint32m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vwsub_vv_i32m8(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vwsub_vx_i32m8(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m8_t test_vwsub_wv_i32m8(vint32m8_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i32m8 -// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv16i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m8_t test_vwsub_wx_i32m8(vint32m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv1i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m1_t test_vwsub_wx_i64m1(vint64m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vwsub_vv_i64m2(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m2_t test_vwsub_vx_i64m2(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m2_t test_vwsub_wv_i64m2(vint64m2_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv2i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m2_t test_vwsub_wx_i64m2(vint64m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv4i64.nxv4i32.nxv4i32.i64( poison, 
[[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m4_t test_vwsub_vv_i64m4(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint64m4_t test_vwsub_vx_i64m4(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint64m4_t test_vwsub_wv_i64m4(vint64m4_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv4i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint64m4_t test_vwsub_wx_i64m4(vint64m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint64m8_t test_vwsub_vv_i64m8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint64m8_t test_vwsub_vx_i64m8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint64m8_t test_vwsub_wv_i64m8(vint64m8_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwsubu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwsubu.c index 37b6bd6687aab7..0bfbb2fd46cfca 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwsubu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/thead/vwsubu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint16m1_t test_vwsubu_wx_u16m1(vuint16m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint16m2_t test_vwsubu_vv_u16m2(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint16m2_t test_vwsubu_vx_u16m2(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i16.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint16m2_t test_vwsubu_wv_u16m2(vuint16m2_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m2_t test_vwsubu_wx_u16m2(vuint16m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m4_t test_vwsubu_vv_u16m4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_vwsubu_vx_u16m4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m4_t test_vwsubu_wv_u16m4(vuint16m4_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv16i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m4_t test_vwsubu_wx_u16m4(vuint16m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m8_t test_vwsubu_vv_u16m8(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m8_t test_vwsubu_vx_u16m8(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m8_t test_vwsubu_wv_u16m8(vuint16m8_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u16m8 
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv32i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m8_t test_vwsubu_wx_u16m8(vuint16m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv2i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint32m1_t test_vwsubu_wx_u32m1(vuint32m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint32m2_t test_vwsubu_vv_u32m2(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint32m2_t test_vwsubu_vx_u32m2(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m2_t test_vwsubu_wv_u32m2(vuint32m2_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv4i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m2_t test_vwsubu_wx_u32m2(vuint32m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vwsubu.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m4_t test_vwsubu_vv_u32m4(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m4_t test_vwsubu_vx_u32m4(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vwsubu_wv_u32m4(vuint32m4_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vwsubu_wx_u32m4(vuint32m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vwsubu_vv_u32m8(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vwsubu_vx_u32m8(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m8_t test_vwsubu_wv_u32m8(vuint32m8_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vwsubu_wx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv16i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m8_t test_vwsubu_wx_u32m8(vuint32m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv1i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m1_t test_vwsubu_wx_u64m1(vuint64m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vwsubu_vv_u64m2(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m2_t test_vwsubu_vx_u64m2(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_vwsubu_wv_u64m2(vuint64m2_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv2i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m2_t test_vwsubu_wx_u64m2(vuint64m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m4_t test_vwsubu_vv_u64m4(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint64m4_t test_vwsubu_vx_u64m4(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint64m4_t test_vwsubu_wv_u64m4(vuint64m4_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv4i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint64m4_t test_vwsubu_wx_u64m4(vuint64m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint64m8_t test_vwsubu_vv_u64m8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint64m8_t test_vwsubu_vx_u64m8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint64m8_t test_vwsubu_wv_u64m8(vuint64m8_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_vwsubu_wx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwadd.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwadd.c index e77a1b06470d2c..d954abe2bf7fcf 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwadd.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwadd.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint16m1_t test_vwadd_wx_i16m1(vint16m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint16m2_t test_vwadd_vv_i16m2(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint16m2_t test_vwadd_vx_i16m2(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i16.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint16m2_t test_vwadd_wv_i16m2(vint16m2_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m2_t test_vwadd_wx_i16m2(vint16m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m4_t test_vwadd_vv_i16m4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_vwadd_vx_i16m4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m4_t test_vwadd_wv_i16m4(vint16m4_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv16i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m4_t test_vwadd_wx_i16m4(vint16m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m8_t test_vwadd_vv_i16m8(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m8_t test_vwadd_vx_i16m8(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m8_t test_vwadd_wv_i16m8(vint16m8_t op1, vint8m4_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv32i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m8_t test_vwadd_wx_i16m8(vint16m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv2i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint32m1_t test_vwadd_wx_i32m1(vint32m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint32m2_t test_vwadd_vv_i32m2(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint32m2_t test_vwadd_vx_i32m2(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m2_t test_vwadd_wv_i32m2(vint32m2_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv4i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m2_t test_vwadd_wx_i32m2(vint32m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m4_t test_vwadd_vv_i32m4(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m4_t test_vwadd_vx_i32m4(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vwadd_wv_i32m4(vint32m4_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vwadd_wx_i32m4(vint32m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vwadd_vv_i32m8(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vwadd_vx_i32m8(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m8_t test_vwadd_wv_i32m8(vint32m8_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i32m8 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv16i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m8_t test_vwadd_wx_i32m8(vint32m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv1i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m1_t test_vwadd_wx_i64m1(vint64m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vwadd_vv_i64m2(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m2_t test_vwadd_vx_i64m2(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m2_t test_vwadd_wv_i64m2(vint64m2_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv2i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m2_t test_vwadd_wx_i64m2(vint64m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vwadd.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m4_t test_vwadd_vv_i64m4(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint64m4_t test_vwadd_vx_i64m4(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint64m4_t test_vwadd_wv_i64m4(vint64m4_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv4i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint64m4_t test_vwadd_wx_i64m4(vint64m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint64m8_t test_vwadd_vv_i64m8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint64m8_t test_vwadd_vx_i64m8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint64m8_t test_vwadd_wv_i64m8(vint64m8_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwadd_wx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 
noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwadd.w.nxv8i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwaddu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwaddu.c index 1553704f9b6bb8..ce2458f642ff25 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwaddu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwaddu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint16m1_t test_vwaddu_wx_u16m1(vuint16m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint16m2_t test_vwaddu_vv_u16m2(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint16m2_t test_vwaddu_vx_u16m2(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i16.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint16m2_t test_vwaddu_wv_u16m2(vuint16m2_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m2_t test_vwaddu_wx_u16m2(vuint16m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m4_t test_vwaddu_vv_u16m4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_vwaddu_vx_u16m4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m4_t test_vwaddu_wv_u16m4(vuint16m4_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv16i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m4_t test_vwaddu_wx_u16m4(vuint16m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m8_t test_vwaddu_vv_u16m8(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m8_t test_vwaddu_vx_u16m8(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m8_t test_vwaddu_wv_u16m8(vuint16m8_t op1, vuint8m4_t op2, 
size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv32i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m8_t test_vwaddu_wx_u16m8(vuint16m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv2i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint32m1_t test_vwaddu_wx_u32m1(vuint32m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint32m2_t test_vwaddu_vv_u32m2(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint32m2_t test_vwaddu_vx_u32m2(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m2_t test_vwaddu_wv_u32m2(vuint32m2_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv4i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m2_t test_vwaddu_wx_u32m2(vuint32m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
// CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m4_t test_vwaddu_vv_u32m4(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m4_t test_vwaddu_vx_u32m4(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vwaddu_wv_u32m4(vuint32m4_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vwaddu_wx_u32m4(vuint32m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vwaddu_vv_u32m8(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vwaddu_vx_u32m8(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m8_t test_vwaddu_wv_u32m8(vuint32m8_t op1, vuint16m4_t 
op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv16i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m8_t test_vwaddu_wx_u32m8(vuint32m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv1i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m1_t test_vwaddu_wx_u64m1(vuint64m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vwaddu_vv_u64m2(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m2_t test_vwaddu_vx_u64m2(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_vwaddu_wv_u64m2(vuint64m2_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv2i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m2_t test_vwaddu_wx_u64m2(vuint64m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m4_t test_vwaddu_vv_u64m4(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint64m4_t test_vwaddu_vx_u64m4(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint64m4_t test_vwaddu_wv_u64m4(vuint64m4_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv4i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint64m4_t test_vwaddu_wx_u64m4(vuint64m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint64m8_t test_vwaddu_vv_u64m8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint64m8_t test_vwaddu_vx_u64m8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vuint64m8_t test_vwaddu_wv_u64m8(vuint64m8_t op1, 
vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwaddu_wx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwaddu.w.nxv8i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwsub.c index c42e7ef37a1a3b..1bb8dadc01dea3 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwsub.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwsub.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint16m1_t test_vwsub_wx_i16m1(vint16m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint16m2_t test_vwsub_vv_i16m2(vint8m1_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint16m2_t test_vwsub_vx_i16m2(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i16.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint16m2_t test_vwsub_wv_i16m2(vint16m2_t op1, vint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m2_t test_vwsub_wx_i16m2(vint16m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m4_t test_vwsub_vv_i16m4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint16m4_t test_vwsub_vx_i16m4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint16m4_t test_vwsub_wv_i16m4(vint16m4_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv16i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint16m4_t test_vwsub_wx_i16m4(vint16m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m8_t test_vwsub_vv_i16m8(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m8_t test_vwsub_vx_i16m8(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m8_t 
test_vwsub_wv_i16m8(vint16m8_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv32i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint16m8_t test_vwsub_wx_i16m8(vint16m8_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv2i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint32m1_t test_vwsub_wx_i32m1(vint32m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint32m2_t test_vwsub_vv_i32m2(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint32m2_t test_vwsub_vx_i32m2(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint32m2_t test_vwsub_wv_i32m2(vint32m2_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv4i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint32m2_t test_vwsub_wx_i32m2(vint32m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint32m4_t test_vwsub_vv_i32m4(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint32m4_t test_vwsub_vx_i32m4(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint32m4_t test_vwsub_wv_i32m4(vint32m4_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint32m4_t test_vwsub_wx_i32m4(vint32m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint32m8_t test_vwsub_vv_i32m8(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint32m8_t test_vwsub_vx_i32m8(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m8_t test_vwsub_wv_i32m8(vint32m8_t op1, vint16m4_t op2, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv16i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m8_t test_vwsub_wx_i32m8(vint32m8_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv1i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint64m1_t test_vwsub_wx_i64m1(vint64m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint64m2_t test_vwsub_vv_i64m2(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint64m2_t test_vwsub_vx_i64m2(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint64m2_t test_vwsub_wv_i64m2(vint64m2_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv2i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m2_t test_vwsub_wx_i64m2(vint64m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m4_t test_vwsub_vv_i64m4(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint64m4_t test_vwsub_vx_i64m4(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint64m4_t test_vwsub_wv_i64m4(vint64m4_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv4i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint64m4_t test_vwsub_wx_i64m4(vint64m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vint64m8_t test_vwsub_vv_i64m8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vint64m8_t test_vwsub_vx_i64m8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ vint64m8_t test_vwsub_wv_i64m8(vint64m8_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsub_wx_i64m8 -// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsub.w.nxv8i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwsubu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwsubu.c index b2f7874fed35d9..9215a528433156 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwsubu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-add/wrappers/vwsubu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint16m1_t test_vwsubu_wx_u16m1(vuint16m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint16m2_t test_vwsubu_vv_u16m2(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint16m2_t test_vwsubu_vx_u16m2(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i16.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint16m2_t test_vwsubu_wv_u16m2(vuint16m2_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m2_t test_vwsubu_wx_u16m2(vuint16m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m4_t test_vwsubu_vv_u16m4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint16m4_t test_vwsubu_vx_u16m4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv16i16.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint16m4_t test_vwsubu_wv_u16m4(vuint16m4_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv16i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint16m4_t test_vwsubu_wx_u16m4(vuint16m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint16m8_t test_vwsubu_vv_u16m8(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint16m8_t test_vwsubu_vx_u16m8(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv32i16.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint16m8_t test_vwsubu_wv_u16m8(vuint16m8_t 
op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv32i16.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint16m8_t test_vwsubu_wx_u16m8(vuint16m8_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u32m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv2i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint32m1_t test_vwsubu_wx_u32m1(vuint32m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint32m2_t test_vwsubu_vv_u32m2(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint32m2_t test_vwsubu_vx_u32m2(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv4i32.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint32m2_t test_vwsubu_wv_u32m2(vuint32m2_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv4i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint32m2_t test_vwsubu_wx_u32m2(vuint32m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint32m4_t test_vwsubu_vv_u32m4(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint32m4_t test_vwsubu_vx_u32m4(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i32.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint32m4_t test_vwsubu_wv_u32m4(vuint32m4_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint32m4_t test_vwsubu_wx_u32m4(vuint32m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint32m8_t test_vwsubu_vv_u32m8(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint32m8_t test_vwsubu_vx_u32m8(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv16i32.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m8_t 
test_vwsubu_wv_u32m8(vuint32m8_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv16i32.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m8_t test_vwsubu_wx_u32m8(vuint32m8_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u64m1 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv1i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint64m1_t test_vwsubu_wx_u64m1(vuint64m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint64m2_t test_vwsubu_vv_u64m2(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint64m2_t test_vwsubu_vx_u64m2(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv2i64.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint64m2_t test_vwsubu_wv_u64m2(vuint64m2_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv2i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m2_t test_vwsubu_wx_u64m2(vuint64m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m4_t test_vwsubu_vv_u64m4(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint64m4_t test_vwsubu_vx_u64m4(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv4i64.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint64m4_t test_vwsubu_wv_u64m4(vuint64m4_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv4i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint64m4_t test_vwsubu_wx_u64m4(vuint64m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -366,7 +367,7 @@ vuint64m8_t test_vwsubu_vv_u64m8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -376,7 +377,7 @@ vuint64m8_t test_vwsubu_vx_u64m8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i64.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -386,7 +387,7 @@ 
vuint64m8_t test_vwsubu_wv_u64m8(vuint64m8_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwsubu_wx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwsubu.w.nxv8i64.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmacc.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmacc.c index 18dc89ede393cb..4acec0076eacab 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmacc.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmacc.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint16m2_t test_vwmacc_vv_i16m2(vint16m2_t vd, vint8m1_t vs1, vint8m1_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv8i16.i8.nxv8i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint16m2_t test_vwmacc_vx_i16m2(vint16m2_t vd, int8_t rs1, vint8m1_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv16i16.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint16m4_t test_vwmacc_vv_i16m4(vint16m4_t vd, vint8m2_t vs1, vint8m2_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv16i16.i8.nxv16i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint16m4_t test_vwmacc_vx_i16m4(vint16m4_t vd, int8_t rs1, vint8m2_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv32i16.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-56,7 +57,7 @@ vint16m8_t test_vwmacc_vv_i16m8(vint16m8_t vd, vint8m4_t vs1, vint8m4_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv32i16.i8.nxv32i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m8_t test_vwmacc_vx_i16m8(vint16m8_t vd, int8_t rs1, vint8m4_t vs2, size_t } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv4i32.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint32m2_t test_vwmacc_vv_i32m2(vint32m2_t vd, vint16m1_t vs1, vint16m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv4i32.i16.nxv4i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint32m2_t test_vwmacc_vx_i32m2(vint32m2_t vd, int16_t rs1, vint16m1_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv8i32.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m4_t test_vwmacc_vv_i32m4(vint32m4_t vd, vint16m2_t vs1, vint16m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv8i32.i16.nxv8i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m4_t test_vwmacc_vx_i32m4(vint32m4_t vd, int16_t rs1, vint16m2_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv16i32.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m8_t test_vwmacc_vv_i32m8(vint32m8_t vd, vint16m4_t vs1, vint16m4_t vs2, s } // 
CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv16i32.i16.nxv16i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_vwmacc_vx_i32m8(vint32m8_t vd, int16_t rs1, vint16m4_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv2i64.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m2_t test_vwmacc_vv_i64m2(vint64m2_t vd, vint32m1_t vs1, vint32m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv2i64.i32.nxv2i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_vwmacc_vx_i64m2(vint64m2_t vd, int32_t rs1, vint32m1_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv4i64.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_vwmacc_vv_i64m4(vint64m4_t vd, vint32m2_t vs1, vint32m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv4i64.i32.nxv4i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m4_t test_vwmacc_vx_i64m4(vint64m4_t vd, int32_t rs1, vint32m2_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv8i64.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint64m8_t test_vwmacc_vv_i64m8(vint64m8_t vd, vint32m4_t vs1, vint32m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 
noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.nxv8i64.i32.nxv8i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint64m8_t test_vwmacc_vx_i64m8(vint64m8_t vd, int32_t rs1, vint32m4_t vs2, size } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv8i16.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint16m2_t test_vwmacc_vv_i16m2_m(vbool8_t mask, vint16m2_t vd, vint8m1_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv8i16.i8.nxv8i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint16m2_t test_vwmacc_vx_i16m2_m(vbool8_t mask, vint16m2_t vd, int8_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv16i16.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint16m4_t test_vwmacc_vv_i16m4_m(vbool4_t mask, vint16m4_t vd, vint8m2_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv16i16.i8.nxv16i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint16m4_t test_vwmacc_vx_i16m4_m(vbool4_t mask, vint16m4_t vd, int8_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv32i16.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint16m8_t test_vwmacc_vv_i16m8_m(vbool2_t mask, vint16m8_t vd, 
vint8m4_t vs1, v } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv32i16.i8.nxv32i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint16m8_t test_vwmacc_vx_i16m8_m(vbool2_t mask, vint16m8_t vd, int8_t rs1, vint } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv4i32.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m2_t test_vwmacc_vv_i32m2_m(vbool16_t mask, vint32m2_t vd, vint16m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv4i32.i16.nxv4i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m2_t test_vwmacc_vx_i32m2_m(vbool16_t mask, vint32m2_t vd, int16_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv8i32.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint32m4_t test_vwmacc_vv_i32m4_m(vbool8_t mask, vint32m4_t vd, vint16m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv8i32.i16.nxv8i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint32m4_t test_vwmacc_vx_i32m4_m(vbool8_t mask, vint32m4_t vd, int16_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv16i32.nxv16i16.nxv16i16.i64( 
[[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint32m8_t test_vwmacc_vv_i32m8_m(vbool4_t mask, vint32m8_t vd, vint16m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv16i32.i16.nxv16i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint32m8_t test_vwmacc_vx_i32m8_m(vbool4_t mask, vint32m8_t vd, int16_t rs1, vin } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv2i64.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m2_t test_vwmacc_vv_i64m2_m(vbool32_t mask, vint64m2_t vd, vint32m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv2i64.i32.nxv2i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m2_t test_vwmacc_vx_i64m2_m(vbool32_t mask, vint64m2_t vd, int32_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv4i64.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint64m4_t test_vwmacc_vv_i64m4_m(vbool16_t mask, vint64m4_t vd, vint32m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv4i64.i32.nxv4i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint64m4_t test_vwmacc_vx_i64m4_m(vbool16_t mask, vint64m4_t vd, int32_t rs1, vi } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv8i64.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint64m8_t test_vwmacc_vv_i64m8_m(vbool8_t mask, vint64m8_t vd, vint32m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmacc_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmacc.mask.nxv8i64.i32.nxv8i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccsu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccsu.c index 3a36e546ed4608..09e92c3d66e492 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccsu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccsu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint16m2_t test_vwmaccsu_vv_i16m2(vint16m2_t vd, vint8m1_t vs1, vuint8m1_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i16m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv8i16.i8.nxv8i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint16m2_t test_vwmaccsu_vx_i16m2(vint16m2_t vd, int8_t rs1, vuint8m1_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv16i16.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint16m4_t test_vwmaccsu_vv_i16m4(vint16m4_t vd, vint8m2_t vs1, vuint8m2_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i16m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv16i16.i8.nxv16i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint16m4_t test_vwmaccsu_vx_i16m4(vint16m4_t vd, int8_t rs1, vuint8m2_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i16m8 -// CHECK-RV64-SAME: ( 
[[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv32i16.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m8_t test_vwmaccsu_vv_i16m8(vint16m8_t vd, vint8m4_t vs1, vuint8m4_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv32i16.i8.nxv32i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m8_t test_vwmaccsu_vx_i16m8(vint16m8_t vd, int8_t rs1, vuint8m4_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv4i32.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint32m2_t test_vwmaccsu_vv_i32m2(vint32m2_t vd, vint16m1_t vs1, vuint16m1_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv4i32.i16.nxv4i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint32m2_t test_vwmaccsu_vx_i32m2(vint32m2_t vd, int16_t rs1, vuint16m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv8i32.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m4_t test_vwmaccsu_vv_i32m4(vint32m4_t vd, vint16m2_t vs1, vuint16m2_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv8i32.i16.nxv8i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m4_t test_vwmaccsu_vx_i32m4(vint32m4_t vd, int16_t rs1, vuint16m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv16i32.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m8_t test_vwmaccsu_vv_i32m8(vint32m8_t vd, vint16m4_t vs1, vuint16m4_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv16i32.i16.nxv16i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_vwmaccsu_vx_i32m8(vint32m8_t vd, int16_t rs1, vuint16m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv2i64.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m2_t test_vwmaccsu_vv_i64m2(vint64m2_t vd, vint32m1_t vs1, vuint32m1_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv2i64.i32.nxv2i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_vwmaccsu_vx_i64m2(vint64m2_t vd, int32_t rs1, vuint32m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv4i64.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_vwmaccsu_vv_i64m4(vint64m4_t vd, vint32m2_t vs1, vuint32m2_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv4i64.i32.nxv4i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m4_t test_vwmaccsu_vx_i64m4(vint64m4_t vd, int32_t rs1, vuint32m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv8i64.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint64m8_t test_vwmaccsu_vv_i64m8(vint64m8_t vd, vint32m4_t vs1, vuint32m4_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.nxv8i64.i32.nxv8i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint64m8_t test_vwmaccsu_vx_i64m8(vint64m8_t vd, int32_t rs1, vuint32m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv8i16.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint16m2_t test_vwmaccsu_vv_i16m2_m(vbool8_t mask, vint16m2_t vd, vint8m1_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv8i16.i8.nxv8i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint16m2_t test_vwmaccsu_vx_i16m2_m(vbool8_t mask, vint16m2_t vd, int8_t rs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv16i16.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint16m4_t test_vwmaccsu_vv_i16m4_m(vbool4_t mask, vint16m4_t vd, vint8m2_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv16i16.i8.nxv16i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint16m4_t test_vwmaccsu_vx_i16m4_m(vbool4_t mask, vint16m4_t vd, int8_t rs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv32i16.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint16m8_t test_vwmaccsu_vv_i16m8_m(vbool2_t mask, vint16m8_t vd, vint8m4_t vs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv32i16.i8.nxv32i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint16m8_t test_vwmaccsu_vx_i16m8_m(vbool2_t mask, vint16m8_t vd, int8_t rs1, vu } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv4i32.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m2_t test_vwmaccsu_vv_i32m2_m(vbool16_t mask, vint32m2_t vd, vint16m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv4i32.i16.nxv4i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m2_t test_vwmaccsu_vx_i32m2_m(vbool16_t mask, vint32m2_t vd, int16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv8i32.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint32m4_t test_vwmaccsu_vv_i32m4_m(vbool8_t mask, vint32m4_t vd, vint16m2_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv8i32.i16.nxv8i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint32m4_t 
test_vwmaccsu_vx_i32m4_m(vbool8_t mask, vint32m4_t vd, int16_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv16i32.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint32m8_t test_vwmaccsu_vv_i32m8_m(vbool4_t mask, vint32m8_t vd, vint16m4_t vs1 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv16i32.i16.nxv16i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint32m8_t test_vwmaccsu_vx_i32m8_m(vbool4_t mask, vint32m8_t vd, int16_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv2i64.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m2_t test_vwmaccsu_vv_i64m2_m(vbool32_t mask, vint64m2_t vd, vint32m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv2i64.i32.nxv2i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m2_t test_vwmaccsu_vx_i64m2_m(vbool32_t mask, vint64m2_t vd, int32_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv4i64.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint64m4_t test_vwmaccsu_vv_i64m4_m(vbool16_t mask, vint64m4_t vd, vint32m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv4i64.i32.nxv4i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -346,7 +347,7 @@ vint64m4_t test_vwmaccsu_vx_i64m4_m(vbool16_t mask, vint64m4_t vd, int32_t rs1,
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vv_i64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv8i64.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -356,7 +357,7 @@ vint64m8_t test_vwmaccsu_vv_i64m8_m(vbool8_t mask, vint64m8_t vd, vint32m4_t vs1
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccsu_vx_i64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccsu.mask.nxv8i64.i32.nxv8i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccu.c
index e568981e877e5b..3b546b53579332 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccu.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccu.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vuint16m2_t test_vwmaccu_vv_u16m2(vuint16m2_t vd, vuint8m1_t vs1, vuint8m1_t vs2
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u16m2
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv8i16.i8.nxv8i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7 +27,7 @@ vuint16m2_t test_vwmaccu_vx_u16m2(vuint16m2_t vd, uint8_t rs1, vuint8m1_t vs2, s
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u16m4
-// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv16i16.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -36,7 +37,7 @@ vuint16m4_t test_vwmaccu_vv_u16m4(vuint16m4_t vd, vuint8m2_t vs1, vuint8m2_t vs2
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u16m4
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv16i16.i8.nxv16i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint16m4_t test_vwmaccu_vx_u16m4(vuint16m4_t vd, uint8_t rs1, vuint8m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv32i16.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m8_t test_vwmaccu_vv_u16m8(vuint16m8_t vd, vuint8m4_t vs1, vuint8m4_t vs2 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u16m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv32i16.i8.nxv32i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m8_t test_vwmaccu_vx_u16m8(vuint16m8_t vd, uint8_t rs1, vuint8m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv4i32.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint32m2_t test_vwmaccu_vv_u32m2(vuint32m2_t vd, vuint16m1_t vs1, vuint16m1_t v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv4i32.i16.nxv4i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint32m2_t test_vwmaccu_vx_u32m2(vuint32m2_t vd, uint16_t rs1, vuint16m1_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv8i32.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m4_t test_vwmaccu_vv_u32m4(vuint32m4_t vd, vuint16m2_t vs1, vuint16m2_t v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv8i32.i16.nxv8i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m4_t test_vwmaccu_vx_u32m4(vuint32m4_t vd, uint16_t rs1, vuint16m2_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv16i32.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m8_t test_vwmaccu_vv_u32m8(vuint32m8_t vd, vuint16m4_t vs1, vuint16m4_t v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv16i32.i16.nxv16i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_vwmaccu_vx_u32m8(vuint32m8_t vd, uint16_t rs1, vuint16m4_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv2i64.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m2_t test_vwmaccu_vv_u64m2(vuint64m2_t vd, vuint32m1_t vs1, vuint32m1_t v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv2i64.i32.nxv2i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_vwmaccu_vx_u64m2(vuint64m2_t vd, uint32_t rs1, vuint32m1_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv4i64.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_vwmaccu_vv_u64m4(vuint64m4_t vd, vuint32m2_t vs1, vuint32m2_t v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv4i64.i32.nxv4i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m4_t test_vwmaccu_vx_u64m4(vuint64m4_t vd, uint32_t rs1, vuint32m2_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv8i64.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint64m8_t test_vwmaccu_vv_u64m8(vuint64m8_t vd, vuint32m4_t vs1, vuint32m4_t v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.nxv8i64.i32.nxv8i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint64m8_t test_vwmaccu_vx_u64m8(vuint64m8_t vd, uint32_t rs1, vuint32m4_t vs2, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv8i16.nxv8i8.nxv8i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint16m2_t test_vwmaccu_vv_u16m2_m(vbool8_t mask, vuint16m2_t vd, vuint8m1_t vs } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv8i16.i8.nxv8i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint16m2_t test_vwmaccu_vx_u16m2_m(vbool8_t mask, vuint16m2_t vd, uint8_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv16i16.nxv16i8.nxv16i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m4_t test_vwmaccu_vv_u16m4_m(vbool4_t mask, vuint16m4_t vd, vuint8m2_t vs } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv16i16.i8.nxv16i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m4_t test_vwmaccu_vx_u16m4_m(vbool4_t mask, vuint16m4_t vd, uint8_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv32i16.nxv32i8.nxv32i8.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m8_t test_vwmaccu_vv_u16m8_m(vbool2_t mask, vuint16m8_t vd, vuint8m4_t vs } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv32i16.i8.nxv32i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m8_t test_vwmaccu_vx_u16m8_m(vbool2_t mask, vuint16m8_t vd, uint8_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv4i32.nxv4i16.nxv4i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m2_t test_vwmaccu_vv_u32m2_m(vbool16_t mask, vuint32m2_t vd, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv4i32.i16.nxv4i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m2_t test_vwmaccu_vx_u32m2_m(vbool16_t mask, vuint32m2_t vd, uint16_t rs1 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv8i32.nxv8i16.nxv8i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m4_t test_vwmaccu_vv_u32m4_m(vbool8_t mask, vuint32m4_t vd, vuint16m2_t v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef 
zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv8i32.i16.nxv8i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m4_t test_vwmaccu_vx_u32m4_m(vbool8_t mask, vuint32m4_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv16i32.nxv16i16.nxv16i16.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint32m8_t test_vwmaccu_vv_u32m8_m(vbool4_t mask, vuint32m8_t vd, vuint16m4_t v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv16i32.i16.nxv16i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint32m8_t test_vwmaccu_vx_u32m8_m(vbool4_t mask, vuint32m8_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv2i64.nxv2i32.nxv2i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m2_t test_vwmaccu_vv_u64m2_m(vbool32_t mask, vuint64m2_t vd, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv2i64.i32.nxv2i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m2_t test_vwmaccu_vx_u64m2_m(vbool32_t mask, vuint64m2_t vd, uint32_t rs1 } // CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv4i64.nxv4i32.nxv4i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint64m4_t 
test_vwmaccu_vv_u64m4_m(vbool16_t mask, vuint64m4_t vd, vuint32m2_t
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u64m4_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv4i64.i32.nxv4i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -346,7 +347,7 @@ vuint64m4_t test_vwmaccu_vx_u64m4_m(vbool16_t mask, vuint64m4_t vd, uint32_t rs1
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vv_u64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], [[VS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv8i64.nxv8i32.nxv8i32.i64( [[VD]], [[VS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -356,7 +357,7 @@ vuint64m8_t test_vwmaccu_vv_u64m8_m(vbool8_t mask, vuint64m8_t vd, vuint32m4_t v
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccu_vx_u64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccu.mask.nxv8i64.i32.nxv8i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccus.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccus.c
index 4d53e7d09201cb..5131878470fbd5 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccus.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul-add/thead/vwmaccus.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vint16m2_t test_vwmaccus_vx_i16m2(vint16m2_t vd, uint8_t rs1, vint8m1_t vs2, siz
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i16m4
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.nxv16i16.i8.nxv16i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7 +27,7 @@ vint16m4_t test_vwmaccus_vx_i16m4(vint16m4_t vd, uint8_t rs1, vint8m2_t vs2, siz
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i16m8
-// CHECK-RV64-SAME: ( [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.nxv32i16.i8.nxv32i8.i64( [[VD]], i8 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint16m8_t test_vwmaccus_vx_i16m8(vint16m8_t vd, uint8_t rs1, vint8m4_t vs2, siz } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i32m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.nxv4i32.i16.nxv4i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint32m2_t test_vwmaccus_vx_i32m2(vint32m2_t vd, uint16_t rs1, vint16m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i32m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.nxv8i32.i16.nxv8i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint32m4_t test_vwmaccus_vx_i32m4(vint32m4_t vd, uint16_t rs1, vint16m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i32m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.nxv16i32.i16.nxv16i16.i64( [[VD]], i16 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint32m8_t test_vwmaccus_vx_i32m8(vint32m8_t vd, uint16_t rs1, vint16m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i64m2 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.nxv2i64.i32.nxv2i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint64m2_t test_vwmaccus_vx_i64m2(vint64m2_t vd, uint32_t rs1, vint32m1_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i64m4 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.nxv4i64.i32.nxv4i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint64m4_t test_vwmaccus_vx_i64m4(vint64m4_t vd, uint32_t rs1, vint32m2_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i64m8 -// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[VD:%.*]], i32 noundef signext 
[[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.nxv8i64.i32.nxv8i32.i64( [[VD]], i32 [[RS1]], [[VS2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint64m8_t test_vwmaccus_vx_i64m8(vint64m8_t vd, uint32_t rs1, vint32m4_t vs2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.mask.nxv8i16.i8.nxv8i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint16m2_t test_vwmaccus_vx_i16m2_m(vbool8_t mask, vint16m2_t vd, uint8_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.mask.nxv16i16.i8.nxv16i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint16m4_t test_vwmaccus_vx_i16m4_m(vbool4_t mask, vint16m4_t vd, uint8_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i8 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.mask.nxv32i16.i8.nxv32i8.i64( [[VD]], i8 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint16m8_t test_vwmaccus_vx_i16m8_m(vbool2_t mask, vint16m8_t vd, uint8_t rs1, v } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.mask.nxv4i32.i16.nxv4i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint32m2_t test_vwmaccus_vx_i32m2_m(vbool16_t mask, vint32m2_t vd, uint16_t rs1, } // CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.mask.nxv8i32.i16.nxv8i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint32m4_t test_vwmaccus_vx_i32m4_m(vbool8_t mask, vint32m4_t vd, 
uint16_t rs1,
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i32m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i16 noundef zeroext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.mask.nxv16i32.i16.nxv16i16.i64( [[VD]], i16 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -156,7 +157,7 @@ vint32m8_t test_vwmaccus_vx_i32m8_m(vbool4_t mask, vint32m8_t vd, uint16_t rs1,
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i64m2_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.mask.nxv2i64.i32.nxv2i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -166,7 +167,7 @@ vint64m2_t test_vwmaccus_vx_i64m2_m(vbool32_t mask, vint64m2_t vd, uint32_t rs1,
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i64m4_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.mask.nxv4i64.i32.nxv4i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -176,7 +177,7 @@ vint64m4_t test_vwmaccus_vx_i64m4_m(vbool16_t mask, vint64m4_t vd, uint32_t rs1,
}
// CHECK-RV64-LABEL: define dso_local @test_vwmaccus_vx_i64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VD:%.*]], i32 noundef signext [[RS1:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmaccus.mask.nxv8i64.i32.nxv8i32.i64( [[VD]], i32 [[RS1]], [[VS2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmul.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmul.c
index 623b05b49f517b..7d744094b9339a 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmul.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmul.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vint16m2_t test_vwmul_vv_i16m2(vint8m1_t op1, vint8m1_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i16m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]])
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vint16m2_t test_vwmul_vx_i16m2(vint8m1_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint16m4_t test_vwmul_vv_i16m4(vint8m2_t op1, vint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint16m4_t test_vwmul_vx_i16m4(vint8m2_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m8_t test_vwmul_vv_i16m8(vint8m4_t op1, vint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m8_t test_vwmul_vx_i16m8(vint8m4_t op1, int8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint32m2_t test_vwmul_vv_i32m2(vint16m1_t op1, vint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint32m2_t test_vwmul_vx_i32m2(vint16m1_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_vwmul_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m4_t test_vwmul_vv_i32m4(vint16m2_t op1, vint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m4_t test_vwmul_vx_i32m4(vint16m2_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m8_t test_vwmul_vv_i32m8(vint16m4_t op1, vint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_vwmul_vx_i32m8(vint16m4_t op1, int16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m2_t test_vwmul_vv_i64m2(vint32m1_t op1, vint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_vwmul_vx_i64m2(vint32m1_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vwmul.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_vwmul_vv_i64m4(vint32m2_t op1, vint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m4_t test_vwmul_vx_i64m4(vint32m2_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint64m8_t test_vwmul_vv_i64m8(vint32m4_t op1, vint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint64m8_t test_vwmul_vx_i64m8(vint32m4_t op1, int32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint16m2_t test_vwmul_vv_i16m2_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint16m2_t test_vwmul_vx_i16m2_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint16m4_t 
test_vwmul_vv_i16m4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vint16m4_t test_vwmul_vx_i16m4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint16m8_t test_vwmul_vv_i16m8_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint16m8_t test_vwmul_vx_i16m8_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_ } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m2_t test_vwmul_vv_i32m2_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m2_t test_vwmul_vx_i32m2_m(vbool16_t mask, vint16m1_t op1, int16_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ 
vint32m4_t test_vwmul_vv_i32m4_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint32m4_t test_vwmul_vx_i32m4_m(vbool8_t mask, vint16m2_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint32m8_t test_vwmul_vv_i32m8_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint32m8_t test_vwmul_vx_i32m8_m(vbool4_t mask, vint16m4_t op1, int16_t op2, siz } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m2_t test_vwmul_vv_i64m2_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m2_t test_vwmul_vx_i64m2_m(vbool32_t mask, vint32m1_t op1, int32_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-336,7 +337,7 @@ vint64m4_t test_vwmul_vv_i64m4_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2,
}
// CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i64m4_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -346,7 +347,7 @@ vint64m4_t test_vwmul_vx_i64m4_m(vbool16_t mask, vint32m2_t op1, int32_t op2, si
}
// CHECK-RV64-LABEL: define dso_local @test_vwmul_vv_i64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -356,7 +357,7 @@ vint64m8_t test_vwmul_vv_i64m8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2,
}
// CHECK-RV64-LABEL: define dso_local @test_vwmul_vx_i64m8_m
-// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmul.mask.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmulsu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmulsu.c
index 999a51f04a5f58..8716f031a39f72 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmulsu.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmulsu.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vint16m2_t test_vwmulsu_vv_i16m2(vint8m1_t op1, vuint8m1_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i16m2
-// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
@@ -26,7 +27,7 @@ vint16m2_t test_vwmulsu_vx_i16m2(vint8m1_t op1, uint8_t op2, size_t vl) {
}
// CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i16m4
-// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vint16m4_t test_vwmulsu_vv_i16m4(vint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vint16m4_t test_vwmulsu_vx_i16m4(vint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vint16m8_t test_vwmulsu_vv_i16m8(vint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vint16m8_t test_vwmulsu_vx_i16m8(vint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vint32m2_t test_vwmulsu_vv_i32m2(vint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vint32m2_t test_vwmulsu_vx_i32m2(vint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vint32m4_t test_vwmulsu_vv_i32m4(vint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vint32m4_t test_vwmulsu_vx_i32m4(vint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vint32m8_t test_vwmulsu_vv_i32m8(vint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vint32m8_t test_vwmulsu_vx_i32m8(vint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vint64m2_t test_vwmulsu_vv_i64m2(vint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vint64m2_t test_vwmulsu_vx_i64m2(vint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vint64m4_t test_vwmulsu_vv_i64m4(vint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv4i64.nxv4i32.i32.i64( 
poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vint64m4_t test_vwmulsu_vx_i64m4(vint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vint64m8_t test_vwmulsu_vv_i64m8(vint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vint64m8_t test_vwmulsu_vx_i64m8(vint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vint16m2_t test_vwmulsu_vv_i16m2_m(vbool8_t mask, vint8m1_t op1, vuint8m1_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vint16m2_t test_vwmulsu_vx_i16m2_m(vbool8_t mask, vint8m1_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vint16m4_t test_vwmulsu_vv_i16m4_m(vbool4_t mask, vint8m2_t op1, vuint8m2_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ 
-226,7 +227,7 @@ vint16m4_t test_vwmulsu_vx_i16m4_m(vbool4_t mask, vint8m2_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vint16m8_t test_vwmulsu_vv_i16m8_m(vbool2_t mask, vint8m4_t op1, vuint8m4_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vint16m8_t test_vwmulsu_vx_i16m8_m(vbool2_t mask, vint8m4_t op1, uint8_t op2, si } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vint32m2_t test_vwmulsu_vv_i32m2_m(vbool16_t mask, vint16m1_t op1, vuint16m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vint32m2_t test_vwmulsu_vx_i32m2_m(vbool16_t mask, vint16m1_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vint32m4_t test_vwmulsu_vv_i32m4_m(vbool8_t mask, vint16m2_t op1, vuint16m2_t op } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vint32m4_t test_vwmulsu_vx_i32m4_m(vbool8_t mask, vint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vint32m8_t test_vwmulsu_vv_i32m8_m(vbool4_t mask, vint16m4_t op1, vuint16m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vint32m8_t test_vwmulsu_vx_i32m8_m(vbool4_t mask, vint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vint64m2_t test_vwmulsu_vv_i64m2_m(vbool32_t mask, vint32m1_t op1, vuint32m1_t o } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vint64m2_t test_vwmulsu_vx_i64m2_m(vbool32_t mask, vint32m1_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vint64m4_t test_vwmulsu_vv_i64m4_m(vbool16_t mask, vint32m2_t op1, vuint32m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], 
i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vint64m4_t test_vwmulsu_vx_i64m4_m(vbool16_t mask, vint32m2_t op1, uint32_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vv_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vint64m8_t test_vwmulsu_vv_i64m8_m(vbool8_t mask, vint32m4_t op1, vuint32m4_t op } // CHECK-RV64-LABEL: define dso_local @test_vwmulsu_vx_i64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulsu.mask.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmulu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmulu.c index e4ed9c077c4f55..80863e6942b5bd 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmulu.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-widening-mul/thead/vwmulu.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vuint16m2_t test_vwmulu_vv_u16m2(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u16m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +27,7 @@ vuint16m2_t test_vwmulu_vx_u16m2(vuint8m1_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +37,7 @@ vuint16m4_t test_vwmulu_vv_u16m4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u16m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv16i16.nxv16i8.i8.i64( poison, 
[[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +47,7 @@ vuint16m4_t test_vwmulu_vx_u16m4(vuint8m2_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +57,7 @@ vuint16m8_t test_vwmulu_vv_u16m8(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u16m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +67,7 @@ vuint16m8_t test_vwmulu_vx_u16m8(vuint8m4_t op1, uint8_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +77,7 @@ vuint32m2_t test_vwmulu_vv_u32m2(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u32m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +87,7 @@ vuint32m2_t test_vwmulu_vx_u32m2(vuint16m1_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +97,7 @@ vuint32m4_t test_vwmulu_vv_u32m4(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u32m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +107,7 @@ vuint32m4_t test_vwmulu_vx_u32m4(vuint16m2_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +117,7 @@ vuint32m8_t test_vwmulu_vv_u32m8(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u32m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -126,7 +127,7 @@ vuint32m8_t test_vwmulu_vx_u32m8(vuint16m4_t op1, uint16_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -136,7 +137,7 @@ vuint64m2_t test_vwmulu_vv_u64m2(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u64m2 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -146,7 +147,7 @@ vuint64m2_t test_vwmulu_vx_u64m2(vuint32m1_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -156,7 +157,7 @@ vuint64m4_t test_vwmulu_vv_u64m4(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u64m4 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -166,7 +167,7 @@ vuint64m4_t test_vwmulu_vx_u64m4(vuint32m2_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv8i64.nxv8i32.nxv8i32.i64( poison, 
[[OP1]], [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -176,7 +177,7 @@ vuint64m8_t test_vwmulu_vv_u64m8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u64m8 -// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -186,7 +187,7 @@ vuint64m8_t test_vwmulu_vx_u64m8(vuint32m4_t op1, uint32_t op2, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -196,7 +197,7 @@ vuint16m2_t test_vwmulu_vv_u16m2_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u16m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv8i16.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -206,7 +207,7 @@ vuint16m2_t test_vwmulu_vx_u16m2_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -216,7 +217,7 @@ vuint16m4_t test_vwmulu_vv_u16m4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u16m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv16i16.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -226,7 +227,7 @@ vuint16m4_t test_vwmulu_vx_u16m4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] @@ -236,7 +237,7 @@ vuint16m8_t test_vwmulu_vv_u16m8_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u16m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv32i16.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -246,7 +247,7 @@ vuint16m8_t test_vwmulu_vx_u16m8_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, s } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -256,7 +257,7 @@ vuint32m2_t test_vwmulu_vv_u32m2_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u32m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv4i32.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -266,7 +267,7 @@ vuint32m2_t test_vwmulu_vx_u32m2_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -276,7 +277,7 @@ vuint32m4_t test_vwmulu_vv_u32m4_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t o } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u32m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv8i32.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -286,7 +287,7 @@ vuint32m4_t test_vwmulu_vx_u32m4_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], 
[[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -296,7 +297,7 @@ vuint32m8_t test_vwmulu_vv_u32m8_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u32m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv16i32.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -306,7 +307,7 @@ vuint32m8_t test_vwmulu_vx_u32m8_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -316,7 +317,7 @@ vuint64m2_t test_vwmulu_vv_u64m2_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u64m2_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv2i64.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -326,7 +327,7 @@ vuint64m2_t test_vwmulu_vx_u64m2_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -336,7 +337,7 @@ vuint64m4_t test_vwmulu_vv_u64m4_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u64m4_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv4i64.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -346,7 +347,7 @@ vuint64m4_t test_vwmulu_vx_u64m4_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2 } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vv_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv8i64.nxv8i32.nxv8i32.i64( 
poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -356,7 +357,7 @@ vuint64m8_t test_vwmulu_vv_u64m8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t o } // CHECK-RV64-LABEL: define dso_local @test_vwmulu_vx_u64m8_m -// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vwmulu.mask.nxv8i64.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2b.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2b.c index 3f9988848a8977..56b033269438b8 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2b.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2b.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e16.c index e518c3601552f7..1df1f620c057ca 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1x2_t test_th_vlseg2e16_v_f16m1x2(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16_v_f16m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv8f16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m2x2_t test_th_vlseg2e16_v_f16m2x2(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16_v_f16m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv16f16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -36,7 +37,7 @@ vfloat16m4x2_t test_th_vlseg2e16_v_f16m4x2(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16_v_i16m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv4i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -46,7 +47,7 @@ vint16m1x2_t test_th_vlseg2e16_v_i16m1x2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16_v_i16m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv8i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -56,7 +57,7 @@ vint16m2x2_t test_th_vlseg2e16_v_i16m2x2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16_v_i16m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv16i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -66,7 +67,7 @@ vint16m4x2_t test_th_vlseg2e16_v_i16m4x2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16_v_u16m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv4i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -76,7 +77,7 @@ vuint16m1x2_t test_th_vlseg2e16_v_u16m1x2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16_v_u16m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv8i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -86,7 +87,7 @@ vuint16m2x2_t test_th_vlseg2e16_v_u16m2x2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16_v_u16m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv16i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e16ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e16ff.c index 28449279958e77..e3414a5b74e188 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e16ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e16ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -22,7 +23,7 @@ 
vfloat16m1x2_t test_th_vlseg2e16ff_v_f16m1x2(const _Float16 *base, size_t *new_v } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16ff_v_f16m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv8f16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -38,7 +39,7 @@ vfloat16m2x2_t test_th_vlseg2e16ff_v_f16m2x2(const _Float16 *base, size_t *new_v } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16ff_v_f16m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv16f16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -54,7 +55,7 @@ vfloat16m4x2_t test_th_vlseg2e16ff_v_f16m4x2(const _Float16 *base, size_t *new_v } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16ff_v_i16m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv4i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -70,7 +71,7 @@ vint16m1x2_t test_th_vlseg2e16ff_v_i16m1x2(const int16_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16ff_v_i16m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv8i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -86,7 +87,7 @@ vint16m2x2_t test_th_vlseg2e16ff_v_i16m2x2(const int16_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16ff_v_i16m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv16i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -102,7 +103,7 @@ vint16m4x2_t test_th_vlseg2e16ff_v_i16m4x2(const int16_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16ff_v_u16m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv4i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -118,7 +119,7 @@ vuint16m1x2_t test_th_vlseg2e16ff_v_u16m1x2(const uint16_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16ff_v_u16m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv8i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -134,7 +135,7 @@ vuint16m2x2_t test_th_vlseg2e16ff_v_u16m2x2(const uint16_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e16ff_v_u16m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv16i16.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e32.c index 3d585e15ccedb4..114ea7a0f657ee 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1x2_t test_th_vlseg2e32_v_f32m1x2(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32_v_f32m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv4f32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m2x2_t test_th_vlseg2e32_v_f32m2x2(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32_v_f32m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv8f32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -36,7 +37,7 @@ vfloat32m4x2_t test_th_vlseg2e32_v_f32m4x2(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32_v_i32m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv2i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -46,7 +47,7 @@ vint32m1x2_t test_th_vlseg2e32_v_i32m1x2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32_v_i32m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv4i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -56,7 +57,7 @@ vint32m2x2_t test_th_vlseg2e32_v_i32m2x2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32_v_i32m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv8i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -66,7 +67,7 @@ vint32m4x2_t test_th_vlseg2e32_v_i32m4x2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32_v_u32m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv2i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -76,7 +77,7 @@ vuint32m1x2_t test_th_vlseg2e32_v_u32m1x2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32_v_u32m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv4i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -86,7 +87,7 @@ vuint32m2x2_t test_th_vlseg2e32_v_u32m2x2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32_v_u32m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv8i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e32ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e32ff.c index dd62acfcaa463d..f4ba4fa46851ae 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e32ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e32ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 
-triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -22,7 +23,7 @@ vfloat32m1x2_t test_th_vlseg2e32ff_v_f32m1x2(const float *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32ff_v_f32m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv4f32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -38,7 +39,7 @@ vfloat32m2x2_t test_th_vlseg2e32ff_v_f32m2x2(const float *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32ff_v_f32m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv8f32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -54,7 +55,7 @@ vfloat32m4x2_t test_th_vlseg2e32ff_v_f32m4x2(const float *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32ff_v_i32m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv2i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -70,7 +71,7 @@ vint32m1x2_t test_th_vlseg2e32ff_v_i32m1x2(const int32_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32ff_v_i32m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv4i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -86,7 +87,7 @@ vint32m2x2_t test_th_vlseg2e32ff_v_i32m2x2(const int32_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32ff_v_i32m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv8i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -102,7 +103,7 @@ vint32m4x2_t test_th_vlseg2e32ff_v_i32m4x2(const int32_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32ff_v_u32m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv2i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -118,7 +119,7 @@ vuint32m1x2_t test_th_vlseg2e32ff_v_u32m1x2(const uint32_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32ff_v_u32m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv4i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -134,7 +135,7 @@ vuint32m2x2_t test_th_vlseg2e32ff_v_u32m2x2(const uint32_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e32ff_v_u32m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv8i32.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e64.c index d520c29ae11912..c0cdc60c0f4043 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat64m1x2_t test_th_vlseg2e64_v_f64m1x2(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64_v_f64m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv2f64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -26,7 +27,7 @@ vfloat64m2x2_t test_th_vlseg2e64_v_f64m2x2(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64_v_f64m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv4f64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -36,7 +37,7 @@ vfloat64m4x2_t test_th_vlseg2e64_v_f64m4x2(const double *base, size_t 
vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64_v_i64m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv1i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -46,7 +47,7 @@ vint64m1x2_t test_th_vlseg2e64_v_i64m1x2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64_v_i64m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv2i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -56,7 +57,7 @@ vint64m2x2_t test_th_vlseg2e64_v_i64m2x2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64_v_i64m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv4i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -66,7 +67,7 @@ vint64m4x2_t test_th_vlseg2e64_v_i64m4x2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64_v_u64m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv1i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -76,7 +77,7 @@ vuint64m1x2_t test_th_vlseg2e64_v_u64m1x2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64_v_u64m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv2i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -86,7 +87,7 @@ vuint64m2x2_t test_th_vlseg2e64_v_u64m2x2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64_v_u64m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv4i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e64ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e64ff.c index 4fce5607e5de65..e2d02105c44d53 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e64ff.c +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e64ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -22,7 +23,7 @@ vfloat64m1x2_t test_th_vlseg2e64ff_v_f64m1x2(const double *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64ff_v_f64m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv2f64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -38,7 +39,7 @@ vfloat64m2x2_t test_th_vlseg2e64ff_v_f64m2x2(const double *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64ff_v_f64m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv4f64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -54,7 +55,7 @@ vfloat64m4x2_t test_th_vlseg2e64ff_v_f64m4x2(const double *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64ff_v_i64m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv1i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -70,7 +71,7 @@ vint64m1x2_t test_th_vlseg2e64ff_v_i64m1x2(const int64_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64ff_v_i64m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv2i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -86,7 +87,7 @@ vint64m2x2_t test_th_vlseg2e64ff_v_i64m2x2(const int64_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64ff_v_i64m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv4i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } 
[[TMP0]], 0 @@ -102,7 +103,7 @@ vint64m4x2_t test_th_vlseg2e64ff_v_i64m4x2(const int64_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64ff_v_u64m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv1i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -118,7 +119,7 @@ vuint64m1x2_t test_th_vlseg2e64ff_v_u64m1x2(const uint64_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64ff_v_u64m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv2i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -134,7 +135,7 @@ vuint64m2x2_t test_th_vlseg2e64ff_v_u64m2x2(const uint64_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e64ff_v_u64m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv4i64.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e8.c index ccbca53edc1108..d19c3ff1483d4e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1x2_t test_th_vlseg2e8_v_i8m1x2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e8_v_i8m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv16i8.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -26,7 +27,7 @@ vint8m2x2_t test_th_vlseg2e8_v_i8m2x2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e8_v_i8m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] 
= call { , } @llvm.riscv.th.vlseg2e.nxv32i8.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -36,7 +37,7 @@ vint8m4x2_t test_th_vlseg2e8_v_i8m4x2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e8_v_u8m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv8i8.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -46,7 +47,7 @@ vuint8m1x2_t test_th_vlseg2e8_v_u8m1x2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e8_v_u8m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv16i8.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] @@ -56,7 +57,7 @@ vuint8m2x2_t test_th_vlseg2e8_v_u8m2x2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e8_v_u8m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , } @llvm.riscv.th.vlseg2e.nxv32i8.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e8ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e8ff.c index a8750aa0a8c5fc..d9fb612a5e1b3b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e8ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg2e8ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -22,7 +23,7 @@ vint8m1x2_t test_th_vlseg2e8ff_v_i8m1x2(const int8_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e8ff_v_i8m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv16i8.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -38,7 +39,7 @@ vint8m2x2_t test_th_vlseg2e8ff_v_i8m2x2(const int8_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e8ff_v_i8m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } 
@llvm.riscv.th.vlseg2eff.nxv32i8.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -54,7 +55,7 @@ vint8m4x2_t test_th_vlseg2e8ff_v_i8m4x2(const int8_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e8ff_v_u8m1x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv8i8.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -70,7 +71,7 @@ vuint8m1x2_t test_th_vlseg2e8ff_v_u8m1x2(const uint8_t *base, size_t *new_vl, si } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e8ff_v_u8m2x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv16i8.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 @@ -86,7 +87,7 @@ vuint8m2x2_t test_th_vlseg2e8ff_v_u8m2x2(const uint8_t *base, size_t *new_vl, si } // CHECK-RV64-LABEL: define dso_local { , } @test_th_vlseg2e8ff_v_u8m4x2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , i64 } @llvm.riscv.th.vlseg2eff.nxv32i8.i64( poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e16.c index aa3d4455dc91a6..52fceb3253c81a 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1x3_t test_th_vlseg3e16_v_f16m1x3(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e16_v_f16m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv8f16.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m2x3_t test_th_vlseg3e16_v_f16m2x3(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e16_v_i16m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv4i16.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -36,7 +37,7 @@ vint16m1x3_t test_th_vlseg3e16_v_i16m1x3(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e16_v_i16m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv8i16.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -46,7 +47,7 @@ vint16m2x3_t test_th_vlseg3e16_v_i16m2x3(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e16_v_u16m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv4i16.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1x3_t test_th_vlseg3e16_v_u16m1x3(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e16_v_u16m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv8i16.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e16ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e16ff.c index 399ccc2440dabf..07a5630ff2eb02 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e16ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e16ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -24,7 +25,7 @@ vfloat16m1x3_t test_th_vlseg3e16ff_v_f16m1x3(const _Float16 *base, size_t *new_v } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e16ff_v_f16m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv8f16.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -42,7 +43,7 @@ vfloat16m2x3_t test_th_vlseg3e16ff_v_f16m2x3(const _Float16 *base, size_t *new_v } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e16ff_v_i16m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef 
[[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv4i16.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -60,7 +61,7 @@ vint16m1x3_t test_th_vlseg3e16ff_v_i16m1x3(const int16_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e16ff_v_i16m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv8i16.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -78,7 +79,7 @@ vint16m2x3_t test_th_vlseg3e16ff_v_i16m2x3(const int16_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e16ff_v_u16m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv4i16.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -96,7 +97,7 @@ vuint16m1x3_t test_th_vlseg3e16ff_v_u16m1x3(const uint16_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e16ff_v_u16m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv8i16.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e32.c index cb59c40b769a89..80e31ba2de843c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1x3_t test_th_vlseg3e32_v_f32m1x3(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e32_v_f32m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv4f32.i64( poison, poison, poison, ptr 
[[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m2x3_t test_th_vlseg3e32_v_f32m2x3(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e32_v_i32m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv2i32.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -36,7 +37,7 @@ vint32m1x3_t test_th_vlseg3e32_v_i32m1x3(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e32_v_i32m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv4i32.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -46,7 +47,7 @@ vint32m2x3_t test_th_vlseg3e32_v_i32m2x3(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e32_v_u32m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv2i32.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -56,7 +57,7 @@ vuint32m1x3_t test_th_vlseg3e32_v_u32m1x3(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e32_v_u32m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv4i32.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e32ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e32ff.c index e23e09547113c7..b580156f690b5a 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e32ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e32ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -24,7 +25,7 @@ vfloat32m1x3_t test_th_vlseg3e32ff_v_f32m1x3(const float *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e32ff_v_f32m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv4f32.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // 
CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -42,7 +43,7 @@ vfloat32m2x3_t test_th_vlseg3e32ff_v_f32m2x3(const float *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e32ff_v_i32m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv2i32.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -60,7 +61,7 @@ vint32m1x3_t test_th_vlseg3e32ff_v_i32m1x3(const int32_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e32ff_v_i32m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv4i32.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -78,7 +79,7 @@ vint32m2x3_t test_th_vlseg3e32ff_v_i32m2x3(const int32_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e32ff_v_u32m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv2i32.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -96,7 +97,7 @@ vuint32m1x3_t test_th_vlseg3e32ff_v_u32m1x3(const uint32_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e32ff_v_u32m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv4i32.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e64.c index d866773d04a9e1..c9c93b8fc431fd 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat64m1x3_t test_th_vlseg3e64_v_f64m1x3(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e64_v_f64m2x3 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv2f64.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -26,7 +27,7 @@ vfloat64m2x3_t test_th_vlseg3e64_v_f64m2x3(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e64_v_i64m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv1i64.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -36,7 +37,7 @@ vint64m1x3_t test_th_vlseg3e64_v_i64m1x3(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e64_v_i64m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv2i64.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -46,7 +47,7 @@ vint64m2x3_t test_th_vlseg3e64_v_i64m2x3(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e64_v_u64m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv1i64.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -56,7 +57,7 @@ vuint64m1x3_t test_th_vlseg3e64_v_u64m1x3(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e64_v_u64m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv2i64.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e64ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e64ff.c index 4d19a71cdaa94f..82b0d7a594ef14 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e64ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e64ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -24,7 +25,7 @@ vfloat64m1x3_t test_th_vlseg3e64ff_v_f64m1x3(const double *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e64ff_v_f64m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv2f64.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -42,7 +43,7 @@ vfloat64m2x3_t test_th_vlseg3e64ff_v_f64m2x3(const double *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e64ff_v_i64m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv1i64.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -60,7 +61,7 @@ vint64m1x3_t test_th_vlseg3e64ff_v_i64m1x3(const int64_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e64ff_v_i64m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv2i64.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -78,7 +79,7 @@ vint64m2x3_t test_th_vlseg3e64ff_v_i64m2x3(const int64_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e64ff_v_u64m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv1i64.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -96,7 +97,7 @@ vuint64m1x3_t test_th_vlseg3e64ff_v_u64m1x3(const uint64_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e64ff_v_u64m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv2i64.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e8.c index 77aede9507355c..d0bb9ba0d7fb06 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple 
riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1x3_t test_th_vlseg3e8_v_i8m1x3(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e8_v_i8m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv16i8.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -26,7 +27,7 @@ vint8m2x3_t test_th_vlseg3e8_v_i8m2x3(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e8_v_u8m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv8i8.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] @@ -36,7 +37,7 @@ vuint8m1x3_t test_th_vlseg3e8_v_u8m1x3(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e8_v_u8m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , } @llvm.riscv.th.vlseg3e.nxv16i8.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e8ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e8ff.c index e32b4f9fd1d670..9b5c0e3536cfef 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e8ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg3e8ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -24,7 +25,7 @@ vint8m1x3_t test_th_vlseg3e8ff_v_i8m1x3(const int8_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e8ff_v_i8m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv16i8.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -42,7 +43,7 @@ vint8m2x3_t test_th_vlseg3e8ff_v_i8m2x3(const int8_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e8ff_v_u8m1x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv8i8.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 @@ -60,7 +61,7 @@ vuint8m1x3_t test_th_vlseg3e8ff_v_u8m1x3(const uint8_t *base, size_t *new_vl, si } // CHECK-RV64-LABEL: define dso_local { , , } @test_th_vlseg3e8ff_v_u8m2x3 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , i64 } @llvm.riscv.th.vlseg3eff.nxv16i8.i64( poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e16.c index 1da0e1561e69b8..bed56f73bb27dc 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1x4_t test_th_vlseg4e16_v_f16m1x4(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e16_v_f16m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv8f16.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -26,7 +27,7 @@ vfloat16m2x4_t test_th_vlseg4e16_v_f16m2x4(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e16_v_i16m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv4i16.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -36,7 +37,7 @@ vint16m1x4_t test_th_vlseg4e16_v_i16m1x4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e16_v_i16m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv8i16.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -46,7 +47,7 @@ vint16m2x4_t test_th_vlseg4e16_v_i16m2x4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e16_v_u16m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv4i16.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -56,7 +57,7 @@ vuint16m1x4_t test_th_vlseg4e16_v_u16m1x4(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e16_v_u16m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv8i16.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e16ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e16ff.c index a1fce9a7487921..65aff6629656b1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e16ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e16ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -26,7 +27,7 @@ vfloat16m1x4_t test_th_vlseg4e16ff_v_f16m1x4(const _Float16 *base, size_t *new_v } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e16ff_v_f16m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv8f16.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -46,7 +47,7 @@ vfloat16m2x4_t test_th_vlseg4e16ff_v_f16m2x4(const _Float16 *base, size_t *new_v } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e16ff_v_i16m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv4i16.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -66,7 +67,7 @@ vint16m1x4_t test_th_vlseg4e16ff_v_i16m1x4(const int16_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e16ff_v_i16m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv8i16.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -86,7 +87,7 @@ 
vint16m2x4_t test_th_vlseg4e16ff_v_i16m2x4(const int16_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e16ff_v_u16m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv4i16.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -106,7 +107,7 @@ vuint16m1x4_t test_th_vlseg4e16ff_v_u16m1x4(const uint16_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e16ff_v_u16m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv8i16.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e32.c index b2e48e37d4cac1..666077ce5a58b2 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1x4_t test_th_vlseg4e32_v_f32m1x4(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e32_v_f32m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv4f32.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -26,7 +27,7 @@ vfloat32m2x4_t test_th_vlseg4e32_v_f32m2x4(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e32_v_i32m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv2i32.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -36,7 +37,7 @@ vint32m1x4_t test_th_vlseg4e32_v_i32m1x4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e32_v_i32m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
{ , , , } @llvm.riscv.th.vlseg4e.nxv4i32.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -46,7 +47,7 @@ vint32m2x4_t test_th_vlseg4e32_v_i32m2x4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e32_v_u32m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv2i32.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -56,7 +57,7 @@ vuint32m1x4_t test_th_vlseg4e32_v_u32m1x4(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e32_v_u32m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv4i32.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e32ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e32ff.c index f108a0302ac0cc..d51125aa1b7fb0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e32ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e32ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -27,7 +28,7 @@ vfloat32m1x4_t test_th_vlseg4e32ff_v_f32m1x4(const float *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e32ff_v_f32m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv4f32.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -47,7 +48,7 @@ vfloat32m2x4_t test_th_vlseg4e32ff_v_f32m2x4(const float *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e32ff_v_i32m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv2i32.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -67,7 +68,7 @@ vint32m1x4_t test_th_vlseg4e32ff_v_i32m1x4(const int32_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e32ff_v_i32m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef 
[[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv4i32.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -87,7 +88,7 @@ vint32m2x4_t test_th_vlseg4e32ff_v_i32m2x4(const int32_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e32ff_v_u32m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv2i32.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -107,7 +108,7 @@ vuint32m1x4_t test_th_vlseg4e32ff_v_u32m1x4(const uint32_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e32ff_v_u32m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv4i32.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e64.c index 8eb3dd61dbfc26..1d92ab402bb178 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat64m1x4_t test_th_vlseg4e64_v_f64m1x4(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e64_v_f64m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv2f64.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -26,7 +27,7 @@ vfloat64m2x4_t test_th_vlseg4e64_v_f64m2x4(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e64_v_i64m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv1i64.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -36,7 +37,7 @@ vint64m1x4_t test_th_vlseg4e64_v_i64m1x4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e64_v_i64m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv2i64.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -46,7 +47,7 @@ vint64m2x4_t test_th_vlseg4e64_v_i64m2x4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e64_v_u64m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv1i64.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -56,7 +57,7 @@ vuint64m1x4_t test_th_vlseg4e64_v_u64m1x4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e64_v_u64m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv2i64.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e64ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e64ff.c index 60ce8fc78fcd9e..2e3b0e4ea3403f 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e64ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e64ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -26,7 +27,7 @@ vfloat64m1x4_t test_th_vlseg4e64ff_v_f64m1x4(const double *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e64ff_v_f64m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv2f64.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -46,7 +47,7 @@ vfloat64m2x4_t test_th_vlseg4e64ff_v_f64m2x4(const double *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e64ff_v_i64m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv1i64.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -66,7 +67,7 @@ vint64m1x4_t test_th_vlseg4e64ff_v_i64m1x4(const int64_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e64ff_v_i64m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv2i64.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -86,7 +87,7 @@ vint64m2x4_t test_th_vlseg4e64ff_v_i64m2x4(const int64_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e64ff_v_u64m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv1i64.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -106,7 +107,7 @@ vuint64m1x4_t test_th_vlseg4e64ff_v_u64m1x4(const uint64_t *base, size_t *new_vl } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e64ff_v_u64m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv2i64.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e8.c index 38fd28c13af2fd..b7f295f28a8171 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1x4_t test_th_vlseg4e8_v_i8m1x4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e8_v_i8m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv16i8.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -26,7 +27,7 @@ vint8m2x4_t test_th_vlseg4e8_v_i8m2x4(const int8_t *base, 
size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e8_v_u8m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv8i8.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] @@ -36,7 +37,7 @@ vuint8m1x4_t test_th_vlseg4e8_v_u8m1x4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e8_v_u8m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , } @llvm.riscv.th.vlseg4e.nxv16i8.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e8ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e8ff.c index 34c31eb708cc17..599955ac6d5805 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e8ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg4e8ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -26,7 +27,7 @@ vint8m1x4_t test_th_vlseg4e8ff_v_i8m1x4(const int8_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e8ff_v_i8m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv16i8.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -46,7 +47,7 @@ vint8m2x4_t test_th_vlseg4e8ff_v_i8m2x4(const int8_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e8ff_v_u8m1x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , i64 } @llvm.riscv.th.vlseg4eff.nxv8i8.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 @@ -66,7 +67,7 @@ vuint8m1x4_t test_th_vlseg4e8ff_v_u8m1x4(const uint8_t *base, size_t *new_vl, si } // CHECK-RV64-LABEL: define dso_local { , , , } @test_th_vlseg4e8ff_v_u8m2x4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , 
i64 } @llvm.riscv.th.vlseg4eff.nxv16i8.i64( poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e16.c index d53e9232697126..23cc8b530cb9e0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1x5_t test_th_vlseg5e16_v_f16m1x5(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e16_v_i16m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , } @llvm.riscv.th.vlseg5e.nxv4i16.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , } [[TMP0]] @@ -26,7 +27,7 @@ vint16m1x5_t test_th_vlseg5e16_v_i16m1x5(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e16_v_u16m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , } @llvm.riscv.th.vlseg5e.nxv4i16.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e16ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e16ff.c index d9ff41d52f41f5..9843dce0cf1da1 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e16ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e16ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -28,7 +29,7 @@ vfloat16m1x5_t test_th_vlseg5e16ff_v_f16m1x5(const _Float16 *base, size_t *new_v } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e16ff_v_i16m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , i64 } @llvm.riscv.th.vlseg5eff.nxv4i16.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , i64 } [[TMP0]], 0 @@ -50,7 +51,7 @@ vint16m1x5_t test_th_vlseg5e16ff_v_i16m1x5(const int16_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , 
, , } @test_th_vlseg5e16ff_v_u16m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , i64 } @llvm.riscv.th.vlseg5eff.nxv4i16.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e32.c index 3de8b4fec4bac6..5860f3c390d492 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1x5_t test_th_vlseg5e32_v_f32m1x5(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e32_v_i32m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , } @llvm.riscv.th.vlseg5e.nxv2i32.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , } [[TMP0]] @@ -26,7 +27,7 @@ vint32m1x5_t test_th_vlseg5e32_v_i32m1x5(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e32_v_u32m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , } @llvm.riscv.th.vlseg5e.nxv2i32.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e32ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e32ff.c index 6eeba68c58a9f6..29cb0aeb533407 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e32ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e32ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -28,7 +29,7 @@ vfloat32m1x5_t test_th_vlseg5e32ff_v_f32m1x5(const float *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e32ff_v_i32m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call { , , , , , i64 } @llvm.riscv.th.vlseg5eff.nxv2i32.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , i64 } [[TMP0]], 0 @@ -50,7 +51,7 @@ vint32m1x5_t test_th_vlseg5e32ff_v_i32m1x5(const int32_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e32ff_v_u32m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , i64 } @llvm.riscv.th.vlseg5eff.nxv2i32.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e64.c index fe91318f8e0269..b520110d32157c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat64m1x5_t test_th_vlseg5e64_v_f64m1x5(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e64_v_i64m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , } @llvm.riscv.th.vlseg5e.nxv1i64.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , } [[TMP0]] @@ -26,7 +27,7 @@ vint64m1x5_t test_th_vlseg5e64_v_i64m1x5(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e64_v_u64m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , } @llvm.riscv.th.vlseg5e.nxv1i64.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e64ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e64ff.c index 2db1179e99d54f..f55780f65249a5 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e64ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e64ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -28,7 +29,7 @@ vfloat64m1x5_t test_th_vlseg5e64ff_v_f64m1x5(const double *base, size_t *new_vl, } // 
CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e64ff_v_i64m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , i64 } @llvm.riscv.th.vlseg5eff.nxv1i64.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , i64 } [[TMP0]], 0 @@ -50,7 +51,7 @@ vint64m1x5_t test_th_vlseg5e64ff_v_i64m1x5(const int64_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e64ff_v_u64m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , i64 } @llvm.riscv.th.vlseg5eff.nxv1i64.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e8.c index f7f0babc76396b..087f8e9ca48534 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1x5_t test_th_vlseg5e8_v_i8m1x5(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e8_v_u8m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , } @llvm.riscv.th.vlseg5e.nxv8i8.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e8ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e8ff.c index c9a1872e6a2126..48e658aaa9e359 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e8ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg5e8ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -28,7 +29,7 @@ vint8m1x5_t test_th_vlseg5e8ff_v_i8m1x5(const int8_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local { , , , , } @test_th_vlseg5e8ff_v_u8m1x5 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , i64 } @llvm.riscv.th.vlseg5eff.nxv8i8.i64( poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e16.c index e1cb30e8c1b590..afb2c29ea9ebb3 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1x6_t test_th_vlseg6e16_v_f16m1x6(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e16_v_i16m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , } @llvm.riscv.th.vlseg6e.nxv4i16.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , } [[TMP0]] @@ -26,7 +27,7 @@ vint16m1x6_t test_th_vlseg6e16_v_i16m1x6(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e16_v_u16m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , } @llvm.riscv.th.vlseg6e.nxv4i16.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e16ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e16ff.c index 250f87e0b71a44..0bedc722aed34f 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e16ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e16ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -30,7 +31,7 @@ vfloat16m1x6_t test_th_vlseg6e16ff_v_f16m1x6(const _Float16 *base, size_t *new_v } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e16ff_v_i16m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , i64 } @llvm.riscv.th.vlseg6eff.nxv4i16.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // 
CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , i64 } [[TMP0]], 0 @@ -54,7 +55,7 @@ vint16m1x6_t test_th_vlseg6e16ff_v_i16m1x6(const int16_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e16ff_v_u16m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , i64 } @llvm.riscv.th.vlseg6eff.nxv4i16.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e32.c index ba294915c86553..3c23b9320a61da 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1x6_t test_th_vlseg6e32_v_f32m1x6(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e32_v_i32m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , } @llvm.riscv.th.vlseg6e.nxv2i32.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , } [[TMP0]] @@ -26,7 +27,7 @@ vint32m1x6_t test_th_vlseg6e32_v_i32m1x6(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e32_v_u32m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , } @llvm.riscv.th.vlseg6e.nxv2i32.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e32ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e32ff.c index 85fd576e0db600..578fe9c210f0c4 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e32ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e32ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -30,7 +31,7 @@ vfloat32m1x6_t test_th_vlseg6e32ff_v_f32m1x6(const float *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e32ff_v_i32m1x6 -// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , i64 } @llvm.riscv.th.vlseg6eff.nxv2i32.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , i64 } [[TMP0]], 0 @@ -54,7 +55,7 @@ vint32m1x6_t test_th_vlseg6e32ff_v_i32m1x6(const int32_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e32ff_v_u32m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , i64 } @llvm.riscv.th.vlseg6eff.nxv2i32.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e64.c index 4e1032a0a262ce..431a402b372a7c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat64m1x6_t test_th_vlseg6e64_v_f64m1x6(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e64_v_i64m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , } @llvm.riscv.th.vlseg6e.nxv1i64.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , } [[TMP0]] @@ -26,7 +27,7 @@ vint64m1x6_t test_th_vlseg6e64_v_i64m1x6(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e64_v_u64m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , } @llvm.riscv.th.vlseg6e.nxv1i64.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e64ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e64ff.c index 37fcc0a90b6709..b205b44ce8ef3b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e64ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e64ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated 
by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -30,7 +31,7 @@ vfloat64m1x6_t test_th_vlseg6e64ff_v_f64m1x6(const double *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e64ff_v_i64m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , i64 } @llvm.riscv.th.vlseg6eff.nxv1i64.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , i64 } [[TMP0]], 0 @@ -54,7 +55,7 @@ vint64m1x6_t test_th_vlseg6e64ff_v_i64m1x6(const int64_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e64ff_v_u64m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , i64 } @llvm.riscv.th.vlseg6eff.nxv1i64.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e8.c index 23ab7dfab540f0..3ed7c8c84cc338 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1x6_t test_th_vlseg6e8_v_i8m1x6(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e8_v_u8m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , } @llvm.riscv.th.vlseg6e.nxv8i8.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e8ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e8ff.c index be6039eb843751..d31a6e62fe3deb 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e8ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg6e8ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S 
-passes=mem2reg | \ @@ -30,7 +31,7 @@ vint8m1x6_t test_th_vlseg6e8ff_v_i8m1x6(const int8_t *base, size_t *new_vl, size } // CHECK-RV64-LABEL: define dso_local { , , , , , } @test_th_vlseg6e8ff_v_u8m1x6 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , i64 } @llvm.riscv.th.vlseg6eff.nxv8i8.i64( poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e16.c index da2f9067135d2a..d095736eef3c1b 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e16.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat16m1x7_t test_th_vlseg7e16_v_f16m1x7(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e16_v_i16m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , } @llvm.riscv.th.vlseg7e.nxv4i16.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , , } [[TMP0]] @@ -26,7 +27,7 @@ vint16m1x7_t test_th_vlseg7e16_v_i16m1x7(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e16_v_u16m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , } @llvm.riscv.th.vlseg7e.nxv4i16.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e16ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e16ff.c index 1ca8f424efad75..23b7b9afda5e0c 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e16ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e16ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -32,7 +33,7 @@ vfloat16m1x7_t test_th_vlseg7e16ff_v_f16m1x7(const _Float16 *base, size_t *new_v } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e16ff_v_i16m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , i64 } @llvm.riscv.th.vlseg7eff.nxv4i16.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , i64 } [[TMP0]], 0 @@ -58,7 +59,7 @@ vint16m1x7_t test_th_vlseg7e16ff_v_i16m1x7(const int16_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e16ff_v_u16m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , i64 } @llvm.riscv.th.vlseg7eff.nxv4i16.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e32.c index 663c179fbc6c41..fa739f08ced008 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e32.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat32m1x7_t test_th_vlseg7e32_v_f32m1x7(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e32_v_i32m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , } @llvm.riscv.th.vlseg7e.nxv2i32.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , , } [[TMP0]] @@ -26,7 +27,7 @@ vint32m1x7_t test_th_vlseg7e32_v_i32m1x7(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e32_v_u32m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , } @llvm.riscv.th.vlseg7e.nxv2i32.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e32ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e32ff.c index ec826c05a99ef5..6b38d8c1e2db6e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e32ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e32ff.c @@ -1,3 +1,4 @@ +// NOTE: 
Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -32,7 +33,7 @@ vfloat32m1x7_t test_th_vlseg7e32ff_v_f32m1x7(const float *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e32ff_v_i32m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , i64 } @llvm.riscv.th.vlseg7eff.nxv2i32.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , i64 } [[TMP0]], 0 @@ -58,7 +59,7 @@ vint32m1x7_t test_th_vlseg7e32ff_v_i32m1x7(const int32_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e32ff_v_u32m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , i64 } @llvm.riscv.th.vlseg7eff.nxv2i32.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e64.c index d3655b9dc2becb..1811e3567c352a 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e64.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vfloat64m1x7_t test_th_vlseg7e64_v_f64m1x7(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e64_v_i64m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , } @llvm.riscv.th.vlseg7e.nxv1i64.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , , } [[TMP0]] @@ -26,7 +27,7 @@ vint64m1x7_t test_th_vlseg7e64_v_i64m1x7(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e64_v_u64m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , } @llvm.riscv.th.vlseg7e.nxv1i64.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , , 
} [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e64ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e64ff.c index ef8b5ecd0339cb..7625d582f8f427 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e64ff.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e64ff.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -32,7 +33,7 @@ vfloat64m1x7_t test_th_vlseg7e64ff_v_f64m1x7(const double *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e64ff_v_i64m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , i64 } @llvm.riscv.th.vlseg7eff.nxv1i64.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , i64 } [[TMP0]], 0 @@ -58,7 +59,7 @@ vint64m1x7_t test_th_vlseg7e64ff_v_i64m1x7(const int64_t *base, size_t *new_vl, } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e64ff_v_u64m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , i64 } @llvm.riscv.th.vlseg7eff.nxv1i64.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , i64 } [[TMP0]], 0 diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e8.c index 7df202fdbebdcd..396a152dc5e973 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e8.c @@ -1,3 +1,4 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ // RUN: -disable-O0-optnone -emit-llvm %s -o - | \ // RUN: opt -S -passes=mem2reg | \ @@ -16,7 +17,7 @@ vint8m1x7_t test_th_vlseg7e8_v_i8m1x7(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e8_v_u8m1x7 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , } @llvm.riscv.th.vlseg7e.nxv8i8.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret { , , , , , , } [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e8ff.c 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e8ff.c
index 500c0b08f91da2..ebf230b28abdfa 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e8ff.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg7e8ff.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -32,7 +33,7 @@ vint8m1x7_t test_th_vlseg7e8ff_v_i8m1x7(const int8_t *base, size_t *new_vl, size
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , } @test_th_vlseg7e8ff_v_u8m1x7
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , i64 } @llvm.riscv.th.vlseg7eff.nxv8i8.i64( poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , i64 } [[TMP0]], 0
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e16.c
index d3d5665c5d26a5..25740f647febf1 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e16.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e16.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vfloat16m1x8_t test_th_vlseg8e16_v_f16m1x8(const _Float16 *base, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e16_v_i16m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , } @llvm.riscv.th.vlseg8e.nxv4i16.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret { , , , , , , , } [[TMP0]]
@@ -26,7 +27,7 @@ vint16m1x8_t test_th_vlseg8e16_v_i16m1x8(const int16_t *base, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e16_v_u16m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , } @llvm.riscv.th.vlseg8e.nxv4i16.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret { , , , , , , , } [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e16ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e16ff.c
index 60957546165cb8..a4a0c6e969cc30 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e16ff.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e16ff.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -34,7 +35,7 @@ vfloat16m1x8_t test_th_vlseg8e16ff_v_f16m1x8(const _Float16 *base, size_t *new_v
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e16ff_v_i16m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , , i64 } @llvm.riscv.th.vlseg8eff.nxv4i16.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , , i64 } [[TMP0]], 0
@@ -62,7 +63,7 @@ vint16m1x8_t test_th_vlseg8e16ff_v_i16m1x8(const int16_t *base, size_t *new_vl,
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e16ff_v_u16m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , , i64 } @llvm.riscv.th.vlseg8eff.nxv4i16.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , , i64 } [[TMP0]], 0
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e32.c
index 82948eea184c95..0062d71932081d 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e32.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e32.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vfloat32m1x8_t test_th_vlseg8e32_v_f32m1x8(const float *base, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e32_v_i32m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , } @llvm.riscv.th.vlseg8e.nxv2i32.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret { , , , , , , , } [[TMP0]]
@@ -26,7 +27,7 @@ vint32m1x8_t test_th_vlseg8e32_v_i32m1x8(const int32_t *base, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e32_v_u32m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , } @llvm.riscv.th.vlseg8e.nxv2i32.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret { , , , , , , , } [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e32ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e32ff.c
index 92ce7cb9a8c2f6..f30129c11609d0 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e32ff.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e32ff.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -34,7 +35,7 @@ vfloat32m1x8_t test_th_vlseg8e32ff_v_f32m1x8(const float *base, size_t *new_vl,
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e32ff_v_i32m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , , i64 } @llvm.riscv.th.vlseg8eff.nxv2i32.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , , i64 } [[TMP0]], 0
@@ -62,7 +63,7 @@ vint32m1x8_t test_th_vlseg8e32ff_v_i32m1x8(const int32_t *base, size_t *new_vl,
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e32ff_v_u32m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , , i64 } @llvm.riscv.th.vlseg8eff.nxv2i32.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , , i64 } [[TMP0]], 0
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e64.c
index d9099e3297d5a7..6787c9873bed8c 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e64.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e64.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vfloat64m1x8_t test_th_vlseg8e64_v_f64m1x8(const double *base, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e64_v_i64m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , } @llvm.riscv.th.vlseg8e.nxv1i64.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret { , , , , , , , } [[TMP0]]
@@ -26,7 +27,7 @@ vint64m1x8_t test_th_vlseg8e64_v_i64m1x8(const int64_t *base, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e64_v_u64m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , } @llvm.riscv.th.vlseg8e.nxv1i64.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret { , , , , , , , } [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e64ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e64ff.c
index aad8676361605c..f656c3b274a11c 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e64ff.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e64ff.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -34,7 +35,7 @@ vfloat64m1x8_t test_th_vlseg8e64ff_v_f64m1x8(const double *base, size_t *new_vl,
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e64ff_v_i64m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , , i64 } @llvm.riscv.th.vlseg8eff.nxv1i64.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , , i64 } [[TMP0]], 0
@@ -62,7 +63,7 @@ vint64m1x8_t test_th_vlseg8e64ff_v_i64m1x8(const int64_t *base, size_t *new_vl,
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e64ff_v_u64m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , , i64 } @llvm.riscv.th.vlseg8eff.nxv1i64.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , , i64 } [[TMP0]], 0
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e8.c
index 2c0fc571890cd6..16415bc6091103 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e8.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e8.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -16,7 +17,7 @@ vint8m1x8_t test_th_vlseg8e8_v_i8m1x8(const int8_t *base, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e8_v_u8m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , } @llvm.riscv.th.vlseg8e.nxv8i8.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: ret { , , , , , , , } [[TMP0]]
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e8ff.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e8ff.c
index bb595625836c1c..22f14aebd748d7 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e8ff.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vlseg8e8ff.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -34,7 +35,7 @@ vint8m1x8_t test_th_vlseg8e8ff_v_i8m1x8(const int8_t *base, size_t *new_vl, size
 }

 // CHECK-RV64-LABEL: define dso_local { , , , , , , , } @test_th_vlseg8e8ff_v_u8m1x8
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { , , , , , , , , i64 } @llvm.riscv.th.vlseg8eff.nxv8i8.i64( poison, poison, poison, poison, poison, poison, poison, poison, ptr [[BASE]], i64 [[VL]])
 // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { , , , , , , , , i64 } [[TMP0]], 0
diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vsseg2e8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vsseg2e8.c
index e80d3a7dc12985..c4f0e2cec8cb63 100644
--- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vsseg2e8.c
+++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/zvlsseg/unit-stride/vsseg2e8.c
@@ -1,3 +1,4 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
 // RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
 // RUN: -disable-O0-optnone -emit-llvm %s -o - | \
 // RUN: opt -S -passes=mem2reg | \
@@ -6,10 +7,10 @@
 #include

 // CHECK-RV64-LABEL: define dso_local void @test_th_vsseg2e8_v_i8m1x2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[TUPLE0:%.*]], [[TUPLE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[V_TUPLE_COERCE0:%.*]], [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
 // CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[TUPLE0]], 0
-// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[TUPLE1]], 1
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[V_TUPLE_COERCE0]], 0
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[V_TUPLE_COERCE1]], 1
 // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { , } [[TMP1]], 0
 // CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { , } [[TMP1]], 1
 // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsseg2e.nxv8i8.i64( [[TMP2]], [[TMP3]], ptr [[BASE]], i64 [[VL]])
@@ -20,10 +21,10 @@ void test_th_vsseg2e8_v_i8m1x2(int8_t *base, vint8m1x2_t v_tuple, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local void @test_th_vsseg2e8_v_i8m2x2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[TUPLE0:%.*]], [[TUPLE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[V_TUPLE_COERCE0:%.*]], [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[TUPLE0]], 0
-// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[TUPLE1]], 1
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[V_TUPLE_COERCE0]], 0
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[V_TUPLE_COERCE1]], 1
 // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { , } [[TMP1]], 0
 // CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { , } [[TMP1]], 1
 // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsseg2e.nxv16i8.i64( [[TMP2]], [[TMP3]], ptr [[BASE]], i64 [[VL]])
@@ -34,10 +35,10 @@ void test_th_vsseg2e8_v_i8m2x2(int8_t *base, vint8m2x2_t v_tuple, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local void @test_th_vsseg2e8_v_i8m4x2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[TUPLE0:%.*]], [[TUPLE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[V_TUPLE_COERCE0:%.*]], [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[TUPLE0]], 0
-// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[TUPLE1]], 1
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[V_TUPLE_COERCE0]], 0
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[V_TUPLE_COERCE1]], 1
 // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { , } [[TMP1]], 0
 // CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { , } [[TMP1]], 1
 // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsseg2e.nxv32i8.i64( [[TMP2]], [[TMP3]], ptr [[BASE]], i64 [[VL]])
@@ -48,10 +49,10 @@ void test_th_vsseg2e8_v_i8m4x2(int8_t *base, vint8m4x2_t v_tuple, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local void @test_th_vsseg2e8_v_u8m1x2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[TUPLE0:%.*]], [[TUPLE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[V_TUPLE_COERCE0:%.*]], [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[TUPLE0]], 0
-// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[TUPLE1]], 1
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[V_TUPLE_COERCE0]], 0
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[V_TUPLE_COERCE1]], 1
 // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { , } [[TMP1]], 0
 // CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { , } [[TMP1]], 1
 // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsseg2e.nxv8i8.i64( [[TMP2]], [[TMP3]], ptr [[BASE]], i64 [[VL]])
@@ -62,10 +63,10 @@ void test_th_vsseg2e8_v_u8m1x2(uint8_t *base, vuint8m1x2_t v_tuple, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local void @test_th_vsseg2e8_v_u8m2x2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[TUPLE0:%.*]], [[TUPLE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[V_TUPLE_COERCE0:%.*]], [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[TUPLE0]], 0
-// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[TUPLE1]], 1
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[V_TUPLE_COERCE0]], 0
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[V_TUPLE_COERCE1]], 1
 // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { , } [[TMP1]], 0
 // CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { , } [[TMP1]], 1
 // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsseg2e.nxv16i8.i64( [[TMP2]], [[TMP3]], ptr [[BASE]], i64 [[VL]])
@@ -76,10 +77,10 @@ void test_th_vsseg2e8_v_u8m2x2(uint8_t *base, vuint8m2x2_t v_tuple, size_t vl) {
 }

 // CHECK-RV64-LABEL: define dso_local void @test_th_vsseg2e8_v_u8m4x2
-// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[TUPLE0:%.*]], [[TUPLE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[V_TUPLE_COERCE0:%.*]], [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
 // CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[TUPLE0]], 0
-// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[TUPLE1]], 1
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { , } poison, [[V_TUPLE_COERCE0]], 0
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { , } [[TMP0]], [[V_TUPLE_COERCE1]], 1
 // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { , } [[TMP1]], 0
 // CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { , } [[TMP1]], 1
 // CHECK-RV64-NEXT: call void @llvm.riscv.th.vsseg2e.nxv32i8.i64( [[TMP2]], [[TMP3]], ptr [[BASE]], i64 [[VL]])