[Clang][XTHeadVector] Support vbool16/32/64_t for vector mask operations (#122)

* [Clang][XTHeadVector] Support `vbool16/32/64_t` for vector mask operations

* [Clang][XTHeadVector] Support `vbool16/32/64_t` for `vcpop`/`vfirst`

* [Clang][XTHeadVector] Support wrappers

* [Clang][XTHeadVector] Fix `vpopc`

* [Clang][XTHeadVector] Fix `vmsbf`, `vmsif`, `vmsof`

* [Clang][XTHeadVector] Update tests
imkiva authored Jun 24, 2024
1 parent ce6e5b0 commit 99ce8e8
Showing 36 changed files with 1,580 additions and 9 deletions.
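
For orientation, a minimal usage sketch (not part of the commit) of the RVV-1.0-style spellings that these wrappers now cover for the narrower mask types. It assumes the wrapper macros generated from riscv_vector_xtheadv_wrappers.td are reachable through <riscv_vector.h> when targeting XTHeadVector; the function names are illustrative.

#include <riscv_vector.h>   // assumed entry point for the XTHeadVector wrapper macros
#include <stddef.h>

// Mask logic on vbool16_t: __riscv_vmand_mm_b16 expands to __riscv_th_vmand_mm_b16.
vbool16_t both_active(vbool16_t a, vbool16_t b, size_t vl) {
  return __riscv_vmand_mm_b16(a, b, vl);
}

// Mask initialization on vbool64_t: __riscv_vmset_m_b64 expands to __riscv_th_vmset_m_b64.
vbool64_t all_set_b64(size_t vl) {
  return __riscv_vmset_m_b64(vl);
}
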
91 changes: 82 additions & 9 deletions clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td
@@ -5477,105 +5477,178 @@ let HeaderCode =
#define __riscv_vmand_mm_b2(op1, op2, vl) __riscv_th_vmand_mm_b2(op1, op2, vl)
#define __riscv_vmand_mm_b4(op1, op2, vl) __riscv_th_vmand_mm_b4(op1, op2, vl)
#define __riscv_vmand_mm_b8(op1, op2, vl) __riscv_th_vmand_mm_b8(op1, op2, vl)
#define __riscv_vmand_mm_b16(op1, op2, vl) __riscv_th_vmand_mm_b16(op1, op2, vl)
#define __riscv_vmand_mm_b32(op1, op2, vl) __riscv_th_vmand_mm_b32(op1, op2, vl)
#define __riscv_vmand_mm_b64(op1, op2, vl) __riscv_th_vmand_mm_b64(op1, op2, vl)
#define __riscv_vmandnot_mm_b1(op1, op2, vl) __riscv_th_vmandnot_mm_b1(op1, op2, vl)
#define __riscv_vmandnot_mm_b2(op1, op2, vl) __riscv_th_vmandnot_mm_b2(op1, op2, vl)
#define __riscv_vmandnot_mm_b4(op1, op2, vl) __riscv_th_vmandnot_mm_b4(op1, op2, vl)
#define __riscv_vmandnot_mm_b8(op1, op2, vl) __riscv_th_vmandnot_mm_b8(op1, op2, vl)
#define __riscv_vmandnot_mm_b16(op1, op2, vl) __riscv_th_vmandnot_mm_b16(op1, op2, vl)
#define __riscv_vmandnot_mm_b32(op1, op2, vl) __riscv_th_vmandnot_mm_b32(op1, op2, vl)
#define __riscv_vmandnot_mm_b64(op1, op2, vl) __riscv_th_vmandnot_mm_b64(op1, op2, vl)
#define __riscv_vmnand_mm_b1(op1, op2, vl) __riscv_th_vmnand_mm_b1(op1, op2, vl)
#define __riscv_vmnand_mm_b2(op1, op2, vl) __riscv_th_vmnand_mm_b2(op1, op2, vl)
#define __riscv_vmnand_mm_b4(op1, op2, vl) __riscv_th_vmnand_mm_b4(op1, op2, vl)
#define __riscv_vmnand_mm_b8(op1, op2, vl) __riscv_th_vmnand_mm_b8(op1, op2, vl)
#define __riscv_vmnand_mm_b16(op1, op2, vl) __riscv_th_vmnand_mm_b16(op1, op2, vl)
#define __riscv_vmnand_mm_b32(op1, op2, vl) __riscv_th_vmnand_mm_b32(op1, op2, vl)
#define __riscv_vmnand_mm_b64(op1, op2, vl) __riscv_th_vmnand_mm_b64(op1, op2, vl)
#define __riscv_vmnor_mm_b1(op1, op2, vl) __riscv_th_vmnor_mm_b1(op1, op2, vl)
#define __riscv_vmnor_mm_b2(op1, op2, vl) __riscv_th_vmnor_mm_b2(op1, op2, vl)
#define __riscv_vmnor_mm_b4(op1, op2, vl) __riscv_th_vmnor_mm_b4(op1, op2, vl)
#define __riscv_vmnor_mm_b8(op1, op2, vl) __riscv_th_vmnor_mm_b8(op1, op2, vl)
#define __riscv_vmnor_mm_b16(op1, op2, vl) __riscv_th_vmnor_mm_b16(op1, op2, vl)
#define __riscv_vmnor_mm_b32(op1, op2, vl) __riscv_th_vmnor_mm_b32(op1, op2, vl)
#define __riscv_vmnor_mm_b64(op1, op2, vl) __riscv_th_vmnor_mm_b64(op1, op2, vl)
#define __riscv_vmor_mm_b1(op1, op2, vl) __riscv_th_vmor_mm_b1(op1, op2, vl)
#define __riscv_vmor_mm_b2(op1, op2, vl) __riscv_th_vmor_mm_b2(op1, op2, vl)
#define __riscv_vmor_mm_b4(op1, op2, vl) __riscv_th_vmor_mm_b4(op1, op2, vl)
#define __riscv_vmor_mm_b8(op1, op2, vl) __riscv_th_vmor_mm_b8(op1, op2, vl)
#define __riscv_vmor_mm_b16(op1, op2, vl) __riscv_th_vmor_mm_b16(op1, op2, vl)
#define __riscv_vmor_mm_b32(op1, op2, vl) __riscv_th_vmor_mm_b32(op1, op2, vl)
#define __riscv_vmor_mm_b64(op1, op2, vl) __riscv_th_vmor_mm_b64(op1, op2, vl)
#define __riscv_vmornot_mm_b1(op1, op2, vl) __riscv_th_vmornot_mm_b1(op1, op2, vl)
#define __riscv_vmornot_mm_b2(op1, op2, vl) __riscv_th_vmornot_mm_b2(op1, op2, vl)
#define __riscv_vmornot_mm_b4(op1, op2, vl) __riscv_th_vmornot_mm_b4(op1, op2, vl)
#define __riscv_vmornot_mm_b8(op1, op2, vl) __riscv_th_vmornot_mm_b8(op1, op2, vl)
#define __riscv_vmornot_mm_b16(op1, op2, vl) __riscv_th_vmornot_mm_b16(op1, op2, vl)
#define __riscv_vmornot_mm_b32(op1, op2, vl) __riscv_th_vmornot_mm_b32(op1, op2, vl)
#define __riscv_vmornot_mm_b64(op1, op2, vl) __riscv_th_vmornot_mm_b64(op1, op2, vl)
#define __riscv_vmxnor_mm_b1(op1, op2, vl) __riscv_th_vmxnor_mm_b1(op1, op2, vl)
#define __riscv_vmxnor_mm_b2(op1, op2, vl) __riscv_th_vmxnor_mm_b2(op1, op2, vl)
#define __riscv_vmxnor_mm_b4(op1, op2, vl) __riscv_th_vmxnor_mm_b4(op1, op2, vl)
#define __riscv_vmxnor_mm_b8(op1, op2, vl) __riscv_th_vmxnor_mm_b8(op1, op2, vl)
#define __riscv_vmxnor_mm_b16(op1, op2, vl) __riscv_th_vmxnor_mm_b16(op1, op2, vl)
#define __riscv_vmxnor_mm_b32(op1, op2, vl) __riscv_th_vmxnor_mm_b32(op1, op2, vl)
#define __riscv_vmxnor_mm_b64(op1, op2, vl) __riscv_th_vmxnor_mm_b64(op1, op2, vl)
#define __riscv_vmxor_mm_b1(op1, op2, vl) __riscv_th_vmxor_mm_b1(op1, op2, vl)
#define __riscv_vmxor_mm_b2(op1, op2, vl) __riscv_th_vmxor_mm_b2(op1, op2, vl)
#define __riscv_vmxor_mm_b4(op1, op2, vl) __riscv_th_vmxor_mm_b4(op1, op2, vl)
#define __riscv_vmxor_mm_b8(op1, op2, vl) __riscv_th_vmxor_mm_b8(op1, op2, vl)
#define __riscv_vmxor_mm_b16(op1, op2, vl) __riscv_th_vmxor_mm_b16(op1, op2, vl)
#define __riscv_vmxor_mm_b32(op1, op2, vl) __riscv_th_vmxor_mm_b32(op1, op2, vl)
#define __riscv_vmxor_mm_b64(op1, op2, vl) __riscv_th_vmxor_mm_b64(op1, op2, vl)

#define __riscv_vmclr_m_b1(vl) __riscv_th_vmclr_m_b1(vl)
#define __riscv_vmclr_m_b2(vl) __riscv_th_vmclr_m_b2(vl)
#define __riscv_vmclr_m_b4(vl) __riscv_th_vmclr_m_b4(vl)
#define __riscv_vmclr_m_b8(vl) __riscv_th_vmclr_m_b8(vl)
#define __riscv_vmclr_m_b16(vl) __riscv_th_vmclr_m_b16(vl)
#define __riscv_vmclr_m_b32(vl) __riscv_th_vmclr_m_b32(vl)
#define __riscv_vmclr_m_b64(vl) __riscv_th_vmclr_m_b64(vl)
#define __riscv_vmset_m_b1(vl) __riscv_th_vmset_m_b1(vl)
#define __riscv_vmset_m_b2(vl) __riscv_th_vmset_m_b2(vl)
#define __riscv_vmset_m_b4(vl) __riscv_th_vmset_m_b4(vl)
#define __riscv_vmset_m_b8(vl) __riscv_th_vmset_m_b8(vl)
#define __riscv_vmset_m_b16(vl) __riscv_th_vmset_m_b16(vl)
#define __riscv_vmset_m_b32(vl) __riscv_th_vmset_m_b32(vl)
#define __riscv_vmset_m_b64(vl) __riscv_th_vmset_m_b64(vl)
#define __riscv_vmmv_m_b1(op1, vl) __riscv_th_vmmv_m_b1(op1, vl)
#define __riscv_vmmv_m_b2(op1, vl) __riscv_th_vmmv_m_b2(op1, vl)
#define __riscv_vmmv_m_b4(op1, vl) __riscv_th_vmmv_m_b4(op1, vl)
#define __riscv_vmmv_m_b8(op1, vl) __riscv_th_vmmv_m_b8(op1, vl)
#define __riscv_vmmv_m_b16(op1, vl) __riscv_th_vmmv_m_b16(op1, vl)
#define __riscv_vmmv_m_b32(op1, vl) __riscv_th_vmmv_m_b32(op1, vl)
#define __riscv_vmmv_m_b64(op1, vl) __riscv_th_vmmv_m_b64(op1, vl)
#define __riscv_vmnot_m_b1(op1, vl) __riscv_th_vmnot_m_b1(op1, vl)
#define __riscv_vmnot_m_b2(op1, vl) __riscv_th_vmnot_m_b2(op1, vl)
#define __riscv_vmnot_m_b4(op1, vl) __riscv_th_vmnot_m_b4(op1, vl)
#define __riscv_vmnot_m_b8(op1, vl) __riscv_th_vmnot_m_b8(op1, vl)

#define __riscv_vpopc_m_b1(op1, vl) __riscv_th_vpopc_m_b1(op1, vl)
#define __riscv_vpopc_m_b2(op1, vl) __riscv_th_vpopc_m_b2(op1, vl)
#define __riscv_vpopc_m_b4(op1, vl) __riscv_th_vpopc_m_b4(op1, vl)
#define __riscv_vpopc_m_b8(op1, vl) __riscv_th_vpopc_m_b8(op1, vl)
#define __riscv_vpopc_m_b1_m(mask, op1, vl) __riscv_th_vpopc_m_b1_m(mask, op1, vl)
#define __riscv_vpopc_m_b2_m(mask, op1, vl) __riscv_th_vpopc_m_b2_m(mask, op1, vl)
#define __riscv_vpopc_m_b4_m(mask, op1, vl) __riscv_th_vpopc_m_b4_m(mask, op1, vl)
#define __riscv_vpopc_m_b8_m(mask, op1, vl) __riscv_th_vpopc_m_b8_m(mask, op1, vl)
#define __riscv_vmnot_m_b16(op1, vl) __riscv_th_vmnot_m_b16(op1, vl)
#define __riscv_vmnot_m_b32(op1, vl) __riscv_th_vmnot_m_b32(op1, vl)
#define __riscv_vmnot_m_b64(op1, vl) __riscv_th_vmnot_m_b64(op1, vl)

// XTHeadVector uses `vpopc`
#define __riscv_vpopc_m_b1(op1, vl) __riscv_th_vcpop_m_b1(op1, vl)
#define __riscv_vpopc_m_b2(op1, vl) __riscv_th_vcpop_m_b2(op1, vl)
#define __riscv_vpopc_m_b4(op1, vl) __riscv_th_vcpop_m_b4(op1, vl)
#define __riscv_vpopc_m_b8(op1, vl) __riscv_th_vcpop_m_b8(op1, vl)
#define __riscv_vpopc_m_b16(op1, vl) __riscv_th_vcpop_m_b16(op1, vl)
#define __riscv_vpopc_m_b32(op1, vl) __riscv_th_vcpop_m_b32(op1, vl)
#define __riscv_vpopc_m_b64(op1, vl) __riscv_th_vcpop_m_b64(op1, vl)
#define __riscv_vpopc_m_b1_m(mask, op1, vl) __riscv_th_vcpop_m_b1_m(mask, op1, vl)
#define __riscv_vpopc_m_b2_m(mask, op1, vl) __riscv_th_vcpop_m_b2_m(mask, op1, vl)
#define __riscv_vpopc_m_b4_m(mask, op1, vl) __riscv_th_vcpop_m_b4_m(mask, op1, vl)
#define __riscv_vpopc_m_b8_m(mask, op1, vl) __riscv_th_vcpop_m_b8_m(mask, op1, vl)
#define __riscv_vpopc_m_b16_m(mask, op1, vl) __riscv_th_vcpop_m_b16_m(mask, op1, vl)
#define __riscv_vpopc_m_b32_m(mask, op1, vl) __riscv_th_vcpop_m_b32_m(mask, op1, vl)
#define __riscv_vpopc_m_b64_m(mask, op1, vl) __riscv_th_vcpop_m_b64_m(mask, op1, vl)
// RVV 1.0 uses `vcpop` instead of `vpopc`
#define __riscv_vcpop_m_b1(op1, vl) __riscv_th_vcpop_m_b1(op1, vl)
#define __riscv_vcpop_m_b2(op1, vl) __riscv_th_vcpop_m_b2(op1, vl)
#define __riscv_vcpop_m_b4(op1, vl) __riscv_th_vcpop_m_b4(op1, vl)
#define __riscv_vcpop_m_b8(op1, vl) __riscv_th_vcpop_m_b8(op1, vl)
#define __riscv_vcpop_m_b16(op1, vl) __riscv_th_vcpop_m_b16(op1, vl)
#define __riscv_vcpop_m_b32(op1, vl) __riscv_th_vcpop_m_b32(op1, vl)
#define __riscv_vcpop_m_b64(op1, vl) __riscv_th_vcpop_m_b64(op1, vl)
#define __riscv_vcpop_m_b1_m(mask, op1, vl) __riscv_th_vcpop_m_b1_m(mask, op1, vl)
#define __riscv_vcpop_m_b2_m(mask, op1, vl) __riscv_th_vcpop_m_b2_m(mask, op1, vl)
#define __riscv_vcpop_m_b4_m(mask, op1, vl) __riscv_th_vcpop_m_b4_m(mask, op1, vl)
#define __riscv_vcpop_m_b8_m(mask, op1, vl) __riscv_th_vcpop_m_b8_m(mask, op1, vl)
#define __riscv_vcpop_m_b16_m(mask, op1, vl) __riscv_th_vcpop_m_b16_m(mask, op1, vl)
#define __riscv_vcpop_m_b32_m(mask, op1, vl) __riscv_th_vcpop_m_b32_m(mask, op1, vl)
#define __riscv_vcpop_m_b64_m(mask, op1, vl) __riscv_th_vcpop_m_b64_m(mask, op1, vl)

#define __riscv_vfirst_m_b1(op1, vl) __riscv_th_vfirst_m_b1(op1, vl)
#define __riscv_vfirst_m_b2(op1, vl) __riscv_th_vfirst_m_b2(op1, vl)
#define __riscv_vfirst_m_b4(op1, vl) __riscv_th_vfirst_m_b4(op1, vl)
#define __riscv_vfirst_m_b8(op1, vl) __riscv_th_vfirst_m_b8(op1, vl)
#define __riscv_vfirst_m_b16(op1, vl) __riscv_th_vfirst_m_b16(op1, vl)
#define __riscv_vfirst_m_b32(op1, vl) __riscv_th_vfirst_m_b32(op1, vl)
#define __riscv_vfirst_m_b64(op1, vl) __riscv_th_vfirst_m_b64(op1, vl)
#define __riscv_vfirst_m_b1_m(mask, op1, vl) __riscv_th_vfirst_m_b1_m(mask, op1, vl)
#define __riscv_vfirst_m_b2_m(mask, op1, vl) __riscv_th_vfirst_m_b2_m(mask, op1, vl)
#define __riscv_vfirst_m_b4_m(mask, op1, vl) __riscv_th_vfirst_m_b4_m(mask, op1, vl)
#define __riscv_vfirst_m_b8_m(mask, op1, vl) __riscv_th_vfirst_m_b8_m(mask, op1, vl)
#define __riscv_vfirst_m_b16_m(mask, op1, vl) __riscv_th_vfirst_m_b16_m(mask, op1, vl)
#define __riscv_vfirst_m_b32_m(mask, op1, vl) __riscv_th_vfirst_m_b32_m(mask, op1, vl)
#define __riscv_vfirst_m_b64_m(mask, op1, vl) __riscv_th_vfirst_m_b64_m(mask, op1, vl)

#define __riscv_vmsbf_m_b1(op1, vl) __riscv_th_vmsbf_m_b1(op1, vl)
#define __riscv_vmsbf_m_b2(op1, vl) __riscv_th_vmsbf_m_b2(op1, vl)
#define __riscv_vmsbf_m_b4(op1, vl) __riscv_th_vmsbf_m_b4(op1, vl)
#define __riscv_vmsbf_m_b8(op1, vl) __riscv_th_vmsbf_m_b8(op1, vl)
#define __riscv_vmsbf_m_b16(op1, vl) __riscv_th_vmsbf_m_b16(op1, vl)
#define __riscv_vmsbf_m_b32(op1, vl) __riscv_th_vmsbf_m_b32(op1, vl)
#define __riscv_vmsbf_m_b64(op1, vl) __riscv_th_vmsbf_m_b64(op1, vl)
#define __riscv_vmsbf_m_b1_m(mask, maskedoff, op1, vl) __riscv_th_vmsbf_m_b1_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsbf_m_b2_m(mask, maskedoff, op1, vl) __riscv_th_vmsbf_m_b2_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsbf_m_b4_m(mask, maskedoff, op1, vl) __riscv_th_vmsbf_m_b4_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsbf_m_b8_m(mask, maskedoff, op1, vl) __riscv_th_vmsbf_m_b8_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsbf_m_b16_m(mask, maskedoff, op1, vl) __riscv_th_vmsbf_m_b16_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsbf_m_b32_m(mask, maskedoff, op1, vl) __riscv_th_vmsbf_m_b32_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsbf_m_b64_m(mask, maskedoff, op1, vl) __riscv_th_vmsbf_m_b64_mu(mask, maskedoff, op1, vl)

#define __riscv_vmsof_m_b1(op1, vl) __riscv_th_vmsof_m_b1(op1, vl)
#define __riscv_vmsof_m_b2(op1, vl) __riscv_th_vmsof_m_b2(op1, vl)
#define __riscv_vmsof_m_b4(op1, vl) __riscv_th_vmsof_m_b4(op1, vl)
#define __riscv_vmsof_m_b8(op1, vl) __riscv_th_vmsof_m_b8(op1, vl)
#define __riscv_vmsof_m_b16(op1, vl) __riscv_th_vmsof_m_b16(op1, vl)
#define __riscv_vmsof_m_b32(op1, vl) __riscv_th_vmsof_m_b32(op1, vl)
#define __riscv_vmsof_m_b64(op1, vl) __riscv_th_vmsof_m_b64(op1, vl)
#define __riscv_vmsof_m_b1_m(mask, maskedoff, op1, vl) __riscv_th_vmsof_m_b1_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsof_m_b2_m(mask, maskedoff, op1, vl) __riscv_th_vmsof_m_b2_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsof_m_b4_m(mask, maskedoff, op1, vl) __riscv_th_vmsof_m_b4_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsof_m_b8_m(mask, maskedoff, op1, vl) __riscv_th_vmsof_m_b8_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsof_m_b16_m(mask, maskedoff, op1, vl) __riscv_th_vmsof_m_b16_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsof_m_b32_m(mask, maskedoff, op1, vl) __riscv_th_vmsof_m_b32_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsof_m_b64_m(mask, maskedoff, op1, vl) __riscv_th_vmsof_m_b64_mu(mask, maskedoff, op1, vl)

#define __riscv_vmsif_m_b1(op1, vl) __riscv_th_vmsif_m_b1(op1, vl)
#define __riscv_vmsif_m_b2(op1, vl) __riscv_th_vmsif_m_b2(op1, vl)
#define __riscv_vmsif_m_b4(op1, vl) __riscv_th_vmsif_m_b4(op1, vl)
#define __riscv_vmsif_m_b8(op1, vl) __riscv_th_vmsif_m_b8(op1, vl)
#define __riscv_vmsif_m_b16(op1, vl) __riscv_th_vmsif_m_b16(op1, vl)
#define __riscv_vmsif_m_b32(op1, vl) __riscv_th_vmsif_m_b32(op1, vl)
#define __riscv_vmsif_m_b64(op1, vl) __riscv_th_vmsif_m_b64(op1, vl)
#define __riscv_vmsif_m_b1_m(mask, maskedoff, op1, vl) __riscv_th_vmsif_m_b1_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsif_m_b2_m(mask, maskedoff, op1, vl) __riscv_th_vmsif_m_b2_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsif_m_b4_m(mask, maskedoff, op1, vl) __riscv_th_vmsif_m_b4_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsif_m_b8_m(mask, maskedoff, op1, vl) __riscv_th_vmsif_m_b8_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsif_m_b16_m(mask, maskedoff, op1, vl) __riscv_th_vmsif_m_b16_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsif_m_b32_m(mask, maskedoff, op1, vl) __riscv_th_vmsif_m_b32_mu(mask, maskedoff, op1, vl)
#define __riscv_vmsif_m_b64_m(mask, maskedoff, op1, vl) __riscv_th_vmsif_m_b64_mu(mask, maskedoff, op1, vl)

#define __riscv_vid_v_u8m1(vl) __riscv_th_vid_v_u8m1(vl)
#define __riscv_vid_v_u8m2(vl) __riscv_th_vid_v_u8m2(vl)
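
The wrapper block above keeps both spellings of the mask population count: the legacy XTHeadVector name vpopc and the RVV 1.0 name vcpop forward to the same __riscv_th_vcpop_* builtins, so either spelling compiles against this header. A hedged sketch under the same assumptions as the example above:

#include <riscv_vector.h>   // assumed entry point for the wrapper macros
#include <stddef.h>

unsigned long count_active_b32_legacy(vbool32_t m, size_t vl) {
  // Legacy spelling; expands to __riscv_th_vcpop_m_b32.
  return __riscv_vpopc_m_b32(m, vl);
}

unsigned long count_active_b32_rvv(vbool32_t m, size_t vl) {
  // RVV 1.0 spelling; expands to the same __riscv_th_vcpop_m_b32 builtin.
  return __riscv_vcpop_m_b32(m, vl);
}
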
@@ -46,6 +46,36 @@ unsigned long test_vcpop_m_b8(vbool8_t op1, size_t vl) {
return __riscv_th_vcpop_m_b8(op1, vl);
}

// CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b16
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.nxv4i1.i64(<vscale x 4 x i1> [[OP1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret i64 [[TMP0]]
//
unsigned long test_vcpop_m_b16(vbool16_t op1, size_t vl) {
return __riscv_th_vcpop_m_b16(op1, vl);
}

// CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b32
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.nxv2i1.i64(<vscale x 2 x i1> [[OP1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret i64 [[TMP0]]
//
unsigned long test_vcpop_m_b32(vbool32_t op1, size_t vl) {
return __riscv_th_vcpop_m_b32(op1, vl);
}

// CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b64
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.nxv1i1.i64(<vscale x 1 x i1> [[OP1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret i64 [[TMP0]]
//
unsigned long test_vcpop_m_b64(vbool64_t op1, size_t vl) {
return __riscv_th_vcpop_m_b64(op1, vl);
}

// CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b1_m
// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i1> [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
@@ -85,3 +115,34 @@ unsigned long test_vcpop_m_b4_m(vbool4_t mask, vbool4_t op1, size_t vl) {
unsigned long test_vcpop_m_b8_m(vbool8_t mask, vbool8_t op1, size_t vl) {
return __riscv_th_vcpop_m_b8_m(mask, op1, vl);
}

// CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b16_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i1> [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.mask.nxv4i1.i64(<vscale x 4 x i1> [[OP1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret i64 [[TMP0]]
//
unsigned long test_vcpop_m_b16_m(vbool16_t mask, vbool16_t op1, size_t vl) {
return __riscv_th_vcpop_m_b16_m(mask, op1, vl);
}

// CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b32_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i1> [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.mask.nxv2i1.i64(<vscale x 2 x i1> [[OP1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret i64 [[TMP0]]
//
unsigned long test_vcpop_m_b32_m(vbool32_t mask, vbool32_t op1, size_t vl) {
return __riscv_th_vcpop_m_b32_m(mask, op1, vl);
}

// CHECK-RV64-LABEL: define dso_local i64 @test_vcpop_m_b64_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i1> [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.th.vmpopc.mask.nxv1i1.i64(<vscale x 1 x i1> [[OP1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret i64 [[TMP0]]
//
unsigned long test_vcpop_m_b64_m(vbool64_t mask, vbool64_t op1, size_t vl) {
return __riscv_th_vcpop_m_b64_m(mask, op1, vl);
}
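
The masked vmsbf/vmsif/vmsof wrappers added in the first file forward the four-argument _m spelling to the corresponding __riscv_th_..._mu builtin, which takes an explicit maskedoff operand. A minimal sketch following the macro signature shown in the diff (illustrative, not part of the commit):

#include <riscv_vector.h>   // assumed entry point for the wrapper macros
#include <stddef.h>

vbool16_t set_before_first_masked(vbool16_t mask, vbool16_t maskedoff,
                                  vbool16_t src, size_t vl) {
  // Expands to __riscv_th_vmsbf_m_b16_mu(mask, maskedoff, src, vl).
  return __riscv_vmsbf_m_b16_m(mask, maskedoff, src, vl);
}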
