Skip to content

Commit

Permalink
Convert int32 conv output packing (pack4 -> pack1/pack8) before int8 requantize on loongarch and mips
Browse files — browse the repository at this point in the history
  • Loading branch information
nihui committed Feb 7, 2025
1 parent 3be03dd commit e096ffd
Show file tree
Hide file tree
Showing 2 changed files with 46 additions and 0 deletions.
23 changes: 23 additions & 0 deletions src/layer/loongarch/convolution_loongarch.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -950,6 +950,29 @@ int Convolution_loongarch::forward_int8_loongarch(const Mat& bottom_blob, Mat& t
}
}

#if __loongarch_sx
// When the packing layout is enabled, normalize the elempack of the int32
// accumulator blob before handing it to requantize_from_int32_to_int8 below.
// NOTE(review): presumably the loongarch requantize kernel only handles
// elempack 1 and 8 (not 4) — confirm against the requantize implementation.
if (opt.use_packing_layout)
{
// NCNN_LOGE("top_blob_int32 %d %d", top_blob_int32.c, top_blob_int32.elempack);
if (use_int8_requantize)
{
// TODO implement winograd sgemm packed int8 pack1 output
// Odd channel count: pack8 is not reachable from pack4, so unpack to elempack 1.
if (top_blob_int32.elempack == 4 && top_blob_int32.c % 2 == 1)
{
Mat tmp;
convert_packing(top_blob_int32, tmp, 1, opt);
top_blob_int32 = tmp;
}
// Even channel count: widen pack4 to pack8 (pairs of pack4 channels merge).
// The two branches are mutually exclusive (c % 2 is either 1 or 0), and the
// first branch also clears elempack == 4, so at most one conversion runs.
if (top_blob_int32.elempack == 4 && top_blob_int32.c % 2 == 0)
{
Mat tmp;
convert_packing(top_blob_int32, tmp, 8, opt);
top_blob_int32 = tmp;
}
}
}
#endif

if (use_int8_requantize)
{
requantize_from_int32_to_int8(top_blob_int32, top_blob, scale_in_data, top_blob_int8_scales, bias_data, activation_type, activation_params, opt);
Expand Down
23 changes: 23 additions & 0 deletions src/layer/mips/convolution_mips.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -950,6 +950,29 @@ int Convolution_mips::forward_int8_mips(const Mat& bottom_blob, Mat& top_blob, c
}
}

#if __mips_msa
// When the packing layout is enabled, normalize the elempack of the int32
// accumulator blob before handing it to requantize_from_int32_to_int8 below.
// NOTE(review): presumably the mips msa requantize kernel only handles
// elempack 1 and 8 (not 4) — confirm against the requantize implementation.
// This block mirrors the loongarch variant in convolution_loongarch.cpp.
if (opt.use_packing_layout)
{
// NCNN_LOGE("top_blob_int32 %d %d", top_blob_int32.c, top_blob_int32.elempack);
if (use_int8_requantize)
{
// TODO implement winograd sgemm packed int8 pack1 output
// Odd channel count: pack8 is not reachable from pack4, so unpack to elempack 1.
if (top_blob_int32.elempack == 4 && top_blob_int32.c % 2 == 1)
{
Mat tmp;
convert_packing(top_blob_int32, tmp, 1, opt);
top_blob_int32 = tmp;
}
// Even channel count: widen pack4 to pack8 (pairs of pack4 channels merge).
// The two branches are mutually exclusive (c % 2 is either 1 or 0), and the
// first branch also clears elempack == 4, so at most one conversion runs.
if (top_blob_int32.elempack == 4 && top_blob_int32.c % 2 == 0)
{
Mat tmp;
convert_packing(top_blob_int32, tmp, 8, opt);
top_blob_int32 = tmp;
}
}
}
#endif

if (use_int8_requantize)
{
requantize_from_int32_to_int8(top_blob_int32, top_blob, scale_in_data, top_blob_int8_scales, bias_data, activation_type, activation_params, opt);
Expand Down

0 comments on commit e096ffd

Please sign in to comment.