From 4b3fc49c97e7e14f007ed8f2987322276e64eef3 Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 14:49:24 +0800 Subject: [PATCH 01/15] [Fix] onnx custom layer --- .../onnx2paddle/onnx_custom_layer/nms.py | 201 ++++++++++++------ .../onnx2paddle/onnx_custom_layer/one_hot.py | 2 +- .../onnx_custom_layer/roi_align.py | 137 +++++++----- 3 files changed, 220 insertions(+), 120 deletions(-) diff --git a/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/nms.py b/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/nms.py index 44f946e6b..56ce48296 100644 --- a/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/nms.py +++ b/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/nms.py @@ -17,68 +17,147 @@ from paddle import in_dynamic_mode from paddle.common_ops_import import Variable, LayerHelper +from x2paddle.utils import check_version -def multiclass_nms(bboxes, - scores, - score_threshold, - nms_top_k, - keep_top_k, - nms_threshold=0.3, - normalized=True, - nms_eta=1., - background_label=-1, - return_index=False, - return_rois_num=True, - rois_num=None, - name=None): - helper = LayerHelper('multiclass_nms3', **locals()) - - if in_dynamic_mode(): - attrs = ('background_label', background_label, 'score_threshold', - score_threshold, 'nms_top_k', nms_top_k, 'nms_threshold', - nms_threshold, 'keep_top_k', keep_top_k, 'nms_eta', nms_eta, - 'normalized', normalized) - output, index, nms_rois_num = _C_ops.multiclass_nms3( - bboxes, scores, rois_num, *attrs) - if not return_index: - index = None - return output, nms_rois_num, index - - else: - output = helper.create_variable_for_type_inference(dtype=bboxes.dtype) - index = helper.create_variable_for_type_inference(dtype='int') - - inputs = {'BBoxes': bboxes, 'Scores': scores} - outputs = {'Out': output, 'Index': index} - - if rois_num is not None: - inputs['RoisNum'] = rois_num - - if return_rois_num: - nms_rois_num = helper.create_variable_for_type_inference( - dtype='int32') - outputs['NmsRoisNum'] = nms_rois_num - - helper.append_op(type="multiclass_nms3", - inputs=inputs, - attrs={ - 'background_label': background_label, - 'score_threshold': score_threshold, - 'nms_top_k': nms_top_k, - 'nms_threshold': nms_threshold, - 'keep_top_k': keep_top_k, - 'nms_eta': nms_eta, - 'normalized': normalized - }, - outputs=outputs) - output.stop_gradient = True - index.stop_gradient = True - if not return_index: - index = None - if not return_rois_num: - nms_rois_num = None - - return output, nms_rois_num, index +if check_version('2.5.0'): + + def multiclass_nms3( + bboxes, + scores, + rois_num=None, + score_threshold=0.3, + nms_top_k=1000, + keep_top_k=100, + nms_threshold=0.3, + normalized=True, + nms_eta=1.0, + background_label=-1, + return_index=True, + return_rois_num=True, + name=None, + ): + helper = LayerHelper('multiclass_nms3', **locals()) + + if paddle.in_dynamic_mode(): + attrs = ( + score_threshold, + nms_top_k, + keep_top_k, + nms_threshold, + normalized, + nms_eta, + background_label, + ) + output, index, nms_rois_num = _C_ops.multiclass_nms3( + bboxes, scores, rois_num, *attrs) + if not return_index: + index = None + return output, nms_rois_num, index + else: + output = helper.create_variable_for_type_inference( + dtype=bboxes.dtype) + index = helper.create_variable_for_type_inference(dtype='int32') + + inputs = {'BBoxes': bboxes, 'Scores': scores} + outputs = {'Out': output, 'Index': index} + + if rois_num is not None: + inputs['RoisNum'] = rois_num + + if return_rois_num: + nms_rois_num = helper.create_variable_for_type_inference( + 
dtype='int32') + outputs['NmsRoisNum'] = nms_rois_num + + helper.append_op( + type="multiclass_nms3", + inputs=inputs, + attrs={ + 'background_label': background_label, + 'score_threshold': score_threshold, + 'nms_top_k': nms_top_k, + 'nms_threshold': nms_threshold, + 'keep_top_k': keep_top_k, + 'nms_eta': nms_eta, + 'normalized': normalized, + }, + outputs=outputs, + ) + output.stop_gradient = True + index.stop_gradient = True + if not return_index: + index = None + if not return_rois_num: + nms_rois_num = None + + return output, nms_rois_num, index + + multiclass_nms = multiclass_nms3 + +else: + + def multiclass_nms(bboxes, + scores, + score_threshold, + nms_top_k, + keep_top_k, + nms_threshold=0.3, + normalized=True, + nms_eta=1., + background_label=-1, + return_index=False, + return_rois_num=True, + rois_num=None, + name=None): + helper = LayerHelper('multiclass_nms3', **locals()) + + if in_dynamic_mode(): + attrs = ('background_label', background_label, 'score_threshold', + score_threshold, 'nms_top_k', nms_top_k, 'nms_threshold', + nms_threshold, 'keep_top_k', keep_top_k, 'nms_eta', + nms_eta, 'normalized', normalized) + output, index, nms_rois_num = _C_ops.multiclass_nms3( + bboxes, scores, rois_num, *attrs) + if not return_index: + index = None + return output, nms_rois_num, index + + else: + output = helper.create_variable_for_type_inference( + dtype=bboxes.dtype) + index = helper.create_variable_for_type_inference(dtype='int') + + inputs = {'BBoxes': bboxes, 'Scores': scores} + outputs = {'Out': output, 'Index': index} + + if rois_num is not None: + inputs['RoisNum'] = rois_num + + if return_rois_num: + nms_rois_num = helper.create_variable_for_type_inference( + dtype='int32') + outputs['NmsRoisNum'] = nms_rois_num + + helper.append_op(type="multiclass_nms3", + inputs=inputs, + attrs={ + 'background_label': background_label, + 'score_threshold': score_threshold, + 'nms_top_k': nms_top_k, + 'nms_threshold': nms_threshold, + 'keep_top_k': keep_top_k, + 'nms_eta': nms_eta, + 'normalized': normalized + }, + outputs=outputs) + output.stop_gradient = True + index.stop_gradient = True + if not return_index: + index = None + if not return_rois_num: + nms_rois_num = None + + return output, nms_rois_num, index class NMS(object): diff --git a/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/one_hot.py b/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/one_hot.py index aa29a2636..1c08cfeef 100644 --- a/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/one_hot.py +++ b/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/one_hot.py @@ -34,7 +34,7 @@ def __call__(self, indices, depth, values): (rank - real_axis)) mod = paddle.mod(indices, depth) v = paddle.reshape(mod, ls + (1, ) + rs) - out = targets == v + out = targets == v.astype(targets.dtype) out = paddle.cast(out, "float32") on_value = paddle.slice(values, axes=[0], starts=[1], ends=[2]) off_value = paddle.slice(values, axes=[0], starts=[0], ends=[1]) diff --git a/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/roi_align.py b/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/roi_align.py index 3ee27d7e6..33c30dfd3 100644 --- a/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/roi_align.py +++ b/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/roi_align.py @@ -17,67 +17,88 @@ from paddle import in_dynamic_mode from paddle.common_ops_import import Variable, LayerHelper, check_variable_and_dtype, check_type, check_dtype +from x2paddle.utils import check_version -@paddle.jit.not_to_static -def roi_align(input, - rois, - pooled_height, - 
pooled_width, - spatial_scale=1.0, - sampling_ratio=-1, - rois_num=None, - aligned=False, - name=None): - if in_dynamic_mode(): - assert rois_num is not None, "rois_num should not be None in dygraph mode." - align_out = _C_ops.roi_align(input, rois, rois_num, "pooled_height", - pooled_height, "pooled_width", - pooled_width, "spatial_scale", - spatial_scale, "sampling_ratio", - sampling_ratio, "aligned", aligned) - return align_out +if check_version('2.5.0'): - else: - check_variable_and_dtype(input, 'input', ['float32', 'float64'], - 'roi_align') - check_variable_and_dtype(rois, 'rois', ['float32', 'float64'], - 'roi_align') - helper = LayerHelper('roi_align', **locals()) - dtype = helper.input_dtype() - align_out = helper.create_variable_for_type_inference(dtype) - inputs = { - "X": input, - "ROIs": rois, - } - if rois_num is not None: - inputs['RoisNum'] = rois_num - helper.append_op(type="roi_align", - inputs=inputs, - outputs={"Out": align_out}, - attrs={ - "pooled_height": pooled_height, - "pooled_width": pooled_width, - "spatial_scale": spatial_scale, - "sampling_ratio": sampling_ratio, - "aligned": aligned, - }) - return align_out + class ROIAlign(object): + def __init__(self, pooled_height, pooled_width, spatial_scale, + sampling_ratio): + self.roialign_layer_attrs = { + "output_size": (pooled_height, pooled_width), + "spatial_scale": spatial_scale, + 'sampling_ratio': sampling_ratio, + } -class ROIAlign(object): + def __call__(self, x0, x1, x2): + out = paddle.vision.ops.roi_align(x=x0, + boxes=x1, + boxes_num=x2, + **self.roialign_layer_attrs) + return out - def __init__(self, pooled_height, pooled_width, spatial_scale, - sampling_ratio): - self.roialign_layer_attrs = { - "pooled_height": pooled_height, - "pooled_width": pooled_width, - "spatial_scale": spatial_scale, - 'sampling_ratio': sampling_ratio, - } +else: - def __call__(self, x0, x1, x2): - out = roi_align(input=x0, - rois=x1, - rois_num=x2, - **self.roialign_layer_attrs) - return out + @paddle.jit.not_to_static + def roi_align(input, + rois, + pooled_height, + pooled_width, + spatial_scale=1.0, + sampling_ratio=-1, + rois_num=None, + aligned=False, + name=None): + if in_dynamic_mode(): + assert rois_num is not None, "rois_num should not be None in dygraph mode." 
+            align_out = _C_ops.roi_align(input, rois, rois_num, "pooled_height",
+                                         pooled_height, "pooled_width",
+                                         pooled_width, "spatial_scale",
+                                         spatial_scale, "sampling_ratio",
+                                         sampling_ratio, "aligned", aligned)
+            return align_out
+
+        else:
+            check_variable_and_dtype(input, 'input', ['float32', 'float64'],
+                                     'roi_align')
+            check_variable_and_dtype(rois, 'rois', ['float32', 'float64'],
+                                     'roi_align')
+            helper = LayerHelper('roi_align', **locals())
+            dtype = helper.input_dtype()
+            align_out = helper.create_variable_for_type_inference(dtype)
+            inputs = {
+                "X": input,
+                "ROIs": rois,
+            }
+            if rois_num is not None:
+                inputs['RoisNum'] = rois_num
+            helper.append_op(type="roi_align",
+                             inputs=inputs,
+                             outputs={"Out": align_out},
+                             attrs={
+                                 "pooled_height": pooled_height,
+                                 "pooled_width": pooled_width,
+                                 "spatial_scale": spatial_scale,
+                                 "sampling_ratio": sampling_ratio,
+                                 "aligned": aligned,
+                             })
+            return align_out
+
+    class ROIAlign(object):
+
+        def __init__(self, pooled_height, pooled_width, spatial_scale,
+                     sampling_ratio):
+            self.roialign_layer_attrs = {
+                "pooled_height": pooled_height,
+                "pooled_width": pooled_width,
+                "spatial_scale": spatial_scale,
+                'sampling_ratio': sampling_ratio,
+            }
+
+        def __call__(self, x0, x1, x2):
+            out = roi_align(input=x0,
+                            rois=x1,
+                            rois_num=x2,
+                            **self.roialign_layer_attrs)
+            return out

From 34210ec21845453730db4365a201ca1ed716ee24 Mon Sep 17 00:00:00 2001
From: megemini
Date: Thu, 24 Oct 2024 14:50:10 +0800
Subject: [PATCH 02/15] [Fix] numpy asscalar deprecated

---
 x2paddle/decoder/onnx_shape_inference.py | 22 +++++++++++++++++++---
 1 file changed, 19 insertions(+), 3 deletions(-)

diff --git a/x2paddle/decoder/onnx_shape_inference.py b/x2paddle/decoder/onnx_shape_inference.py
index af1962543..74e27e579 100644
--- a/x2paddle/decoder/onnx_shape_inference.py
+++ b/x2paddle/decoder/onnx_shape_inference.py
@@ -31,8 +31,18 @@ def get_attribute(node, attr_name, default_value=None):
     return default_value
 
 
+def filter_dim(dim):
+    """
+    onnx latest version make `unk_` dim instead of `None`
+    """
+    if isinstance(dim, str) and dim.startswith('unk_'):
+        return None
+
+    return dim
+
+
 def get_dim_from_type_proto(dim):
-    return getattr(dim, dim.WhichOneof('value')) if type(
+    return filter_dim(getattr(dim, dim.WhichOneof('value'))) if type(
         dim.WhichOneof('value')) == str else None
 
 
@@ -73,7 +83,13 @@ def as_scalar(x):
         assert len(x) == 1
         return x[0]
     elif type(x) == np.ndarray:
-        return np.asscalar(x)
+        _x = None
+        try:
+            _x = np.asscalar(x)
+        except:
+            _x = as_scalar(x.tolist())
+
+        return _x
     else:
         return x
 
@@ -508,7 +524,7 @@ def _get_int_values(self, node, broadcast=False):
             if len(v.shape) > 1:
                 new_v = None  # ignore value for rank > 1
             elif len(v.shape) == 0:
-                new_v = int(np.asscalar(v))
+                new_v = int(as_scalar(v))
             else:
                 assert len(v.shape) == 1
                 new_v = [int(vv) for vv in v]

From c9d4da29e2226086b5a717fad10a1ff7b745580f Mon Sep 17 00:00:00 2001
From: megemini
Date: Thu, 24 Oct 2024 14:50:49 +0800
Subject: [PATCH 03/15] [Fix] onnx SwinTransformer

---
 test_benchmark/ONNX/SwinTransformer/pd_infer.py | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/test_benchmark/ONNX/SwinTransformer/pd_infer.py b/test_benchmark/ONNX/SwinTransformer/pd_infer.py
index 1e62cf887..fa3aafc7c 100644
--- a/test_benchmark/ONNX/SwinTransformer/pd_infer.py
+++ b/test_benchmark/ONNX/SwinTransformer/pd_infer.py
@@ -1,4 +1,3 @@
-import paddle.fluid as fluid
 import paddle
 import numpy as np
 import sys
@@ -11,11 +10,9 @@
     exe = paddle.static.Executor(paddle.CPUPlace())
 
     # test
dygraph - [prog, inputs, outputs] = fluid.io.load_inference_model( - dirname="pd_model_dygraph/inference_model/", - executor=exe, - model_filename="model.pdmodel", - params_filename="model.pdiparams") + [prog, inputs, outputs] = paddle.static.load_inference_model( + path_prefix="pd_model_dygraph/inference_model/model", executor=exe) + LN_nums = 0 for i, op in enumerate(prog.block(0).ops): if op.type in ['feed', 'fetch']: From b22f2aa26e644112ce75f6caff1fdbc9e20272d9 Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 14:51:06 +0800 Subject: [PATCH 04/15] [Fix] onnx bert6 --- test_benchmark/ONNX/bert6/pd_infer.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/test_benchmark/ONNX/bert6/pd_infer.py b/test_benchmark/ONNX/bert6/pd_infer.py index 3bebc6c4a..15730f63c 100644 --- a/test_benchmark/ONNX/bert6/pd_infer.py +++ b/test_benchmark/ONNX/bert6/pd_infer.py @@ -1,5 +1,4 @@ import paddle -import paddle.fluid as fluid import numpy as np import pickle import sys @@ -16,11 +15,9 @@ exe = paddle.static.Executor(paddle.CPUPlace()) # test dygraph - [prog, inputs, outputs] = fluid.io.load_inference_model( - dirname="pd_model_dygraph/inference_model/", - executor=exe, - model_filename="model.pdmodel", - params_filename="model.pdiparams") + [prog, inputs, outputs] = paddle.static.load_inference_model( + path_prefix="pd_model_dygraph/inference_model/model", executor=exe) + result = exe.run(prog, feed={ inputs[0]: input_data["input_ids"], From 71dab50b56f9952ee468250d2a254cf069daa68f Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 15:12:16 +0800 Subject: [PATCH 05/15] [Fix] onnx custom layer roi align --- .../onnx_custom_layer/roi_align.py | 103 +++++++++++++----- 1 file changed, 73 insertions(+), 30 deletions(-) diff --git a/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/roi_align.py b/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/roi_align.py index 33c30dfd3..eb17ca18c 100644 --- a/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/roi_align.py +++ b/x2paddle/op_mapper/onnx2paddle/onnx_custom_layer/roi_align.py @@ -15,29 +15,71 @@ import paddle from paddle import _C_ops from paddle import in_dynamic_mode +from paddle.base.framework import in_dynamic_or_pir_mode from paddle.common_ops_import import Variable, LayerHelper, check_variable_and_dtype, check_type, check_dtype from x2paddle.utils import check_version if check_version('2.5.0'): - class ROIAlign(object): + @paddle.jit.not_to_static + def roi_align( + input, + rois, + pooled_height, + pooled_width, + spatial_scale=1.0, + sampling_ratio=-1, + rois_num=None, + aligned=False, + name=None, + ): + # make input's param name like before + x = input + boxes = rois + boxes_num = rois_num - def __init__(self, pooled_height, pooled_width, spatial_scale, - sampling_ratio): - self.roialign_layer_attrs = { - "output_size": (pooled_height, pooled_width), - "spatial_scale": spatial_scale, - 'sampling_ratio': sampling_ratio, + if in_dynamic_or_pir_mode(): + assert ( + boxes_num + is not None), "boxes_num should not be None in dygraph mode." 
+ return _C_ops.roi_align( + x, + boxes, + boxes_num, + pooled_height, + pooled_width, + spatial_scale, + sampling_ratio, + aligned, + ) + else: + check_variable_and_dtype(x, 'x', ['float32', 'float64'], + 'roi_align') + check_variable_and_dtype(boxes, 'boxes', ['float32', 'float64'], + 'roi_align') + helper = LayerHelper('roi_align', **locals()) + dtype = helper.input_dtype() + align_out = helper.create_variable_for_type_inference(dtype) + inputs = { + "X": x, + "ROIs": boxes, } - - def __call__(self, x0, x1, x2): - out = paddle.vision.ops.roi_align(x=x0, - boxes=x1, - boxes_num=x2, - **self.roialign_layer_attrs) - return out - + if boxes_num is not None: + inputs['RoisNum'] = boxes_num + helper.append_op( + type="roi_align", + inputs=inputs, + outputs={"Out": align_out}, + attrs={ + "pooled_height": pooled_height, + "pooled_width": pooled_width, + "spatial_scale": spatial_scale, + "sampling_ratio": sampling_ratio, + "aligned": aligned, + }, + ) + return align_out else: @paddle.jit.not_to_static @@ -85,20 +127,21 @@ def roi_align(input, }) return align_out - class ROIAlign(object): - def __init__(self, pooled_height, pooled_width, spatial_scale, - sampling_ratio): - self.roialign_layer_attrs = { - "pooled_height": pooled_height, - "pooled_width": pooled_width, - "spatial_scale": spatial_scale, - 'sampling_ratio': sampling_ratio, - } +class ROIAlign(object): + + def __init__(self, pooled_height, pooled_width, spatial_scale, + sampling_ratio): + self.roialign_layer_attrs = { + "pooled_height": pooled_height, + "pooled_width": pooled_width, + "spatial_scale": spatial_scale, + 'sampling_ratio': sampling_ratio, + } - def __call__(self, x0, x1, x2): - out = roi_align(input=x0, - rois=x1, - rois_num=x2, - **self.roialign_layer_attrs) - return out + def __call__(self, x0, x1, x2): + out = roi_align(input=x0, + rois=x1, + rois_num=x2, + **self.roialign_layer_attrs) + return out From e90751616ce23ac4d47e0f5ca4ce4a369fb6bc71 Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 15:12:54 +0800 Subject: [PATCH 06/15] [Fix] onnx mmdetection_fasterrcnn --- test_benchmark/ONNX/mmdetection_fasterrcnn/pd_infer.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/test_benchmark/ONNX/mmdetection_fasterrcnn/pd_infer.py b/test_benchmark/ONNX/mmdetection_fasterrcnn/pd_infer.py index 7eb27cc07..4016bebb9 100644 --- a/test_benchmark/ONNX/mmdetection_fasterrcnn/pd_infer.py +++ b/test_benchmark/ONNX/mmdetection_fasterrcnn/pd_infer.py @@ -1,5 +1,4 @@ import paddle -import paddle.fluid as fluid import numpy as np import pickle import sys @@ -10,11 +9,9 @@ paddle.enable_static() exe = paddle.static.Executor(paddle.CPUPlace()) # test Dygraph - [prog, inputs, outputs] = fluid.io.load_inference_model( - dirname="pd_model_dygraph/inference_model/", - executor=exe, - model_filename="model.pdmodel", - params_filename="model.pdiparams") + [prog, inputs, outputs] = paddle.static.load_inference_model( + path_prefix="pd_model_dygraph/inference_model/model", executor=exe) + data = np.load( "../dataset/mmdetection_fasterrcnn/real_img_data_fasterrcnn_50.npy") result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs) From 3f7b386e4aa5ef8a2067ccf17129b115e935facf Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 15:15:38 +0800 Subject: [PATCH 07/15] [Fix] onnx mmdetection_fcos --- test_benchmark/ONNX/mmdetection_fcos/pd_infer.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/test_benchmark/ONNX/mmdetection_fcos/pd_infer.py 
b/test_benchmark/ONNX/mmdetection_fcos/pd_infer.py index 8c21b3067..f24a9055b 100644 --- a/test_benchmark/ONNX/mmdetection_fcos/pd_infer.py +++ b/test_benchmark/ONNX/mmdetection_fcos/pd_infer.py @@ -1,5 +1,4 @@ import paddle -import paddle.fluid as fluid import numpy as np import pickle import sys @@ -10,11 +9,9 @@ paddle.enable_static() exe = paddle.static.Executor(paddle.CPUPlace()) # test Dygraph - [prog, inputs, outputs] = fluid.io.load_inference_model( - dirname="pd_model_dygraph/inference_model/", - executor=exe, - model_filename="model.pdmodel", - params_filename="model.pdiparams") + [prog, inputs, outputs] = paddle.static.load_inference_model( + path_prefix="pd_model_dygraph/inference_model/model", executor=exe) + data = np.load("../dataset/mmdetection_fcos/real_img_data_fcos_50.npy") result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs) From f6737bd17f2bfba59b5625efd758f03a49500ac9 Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 15:17:11 +0800 Subject: [PATCH 08/15] [Fix] onnx mmdetection_fsaf --- test_benchmark/ONNX/mmdetection_fsaf/pd_infer.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/test_benchmark/ONNX/mmdetection_fsaf/pd_infer.py b/test_benchmark/ONNX/mmdetection_fsaf/pd_infer.py index f0cb1466b..48958dfd0 100644 --- a/test_benchmark/ONNX/mmdetection_fsaf/pd_infer.py +++ b/test_benchmark/ONNX/mmdetection_fsaf/pd_infer.py @@ -1,5 +1,4 @@ import paddle -import paddle.fluid as fluid import numpy as np import pickle import sys @@ -10,11 +9,8 @@ paddle.enable_static() exe = paddle.static.Executor(paddle.CPUPlace()) # test Dygraph - [prog, inputs, outputs] = fluid.io.load_inference_model( - dirname="pd_model_dygraph/inference_model/", - executor=exe, - model_filename="model.pdmodel", - params_filename="model.pdiparams") + [prog, inputs, outputs] = paddle.static.load_inference_model( + path_prefix="pd_model_dygraph/inference_model/model", executor=exe) data = np.load("../dataset/mmdetection_fsaf/real_img_data_fsaf_50.npy") result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs) From edbf911ebad4f2b9d988f3447ceb2cc94834e240 Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 15:18:47 +0800 Subject: [PATCH 09/15] [Fix] onnx mmdetection_retina --- test_benchmark/ONNX/mmdetection_retina/pd_infer.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/test_benchmark/ONNX/mmdetection_retina/pd_infer.py b/test_benchmark/ONNX/mmdetection_retina/pd_infer.py index 99dfffb60..7e9477729 100644 --- a/test_benchmark/ONNX/mmdetection_retina/pd_infer.py +++ b/test_benchmark/ONNX/mmdetection_retina/pd_infer.py @@ -1,5 +1,4 @@ import paddle -import paddle.fluid as fluid import numpy as np import pickle import sys @@ -10,11 +9,8 @@ paddle.enable_static() exe = paddle.static.Executor(paddle.CPUPlace()) # test Dygraph - [prog, inputs, outputs] = fluid.io.load_inference_model( - dirname="pd_model_dygraph/inference_model/", - executor=exe, - model_filename="model.pdmodel", - params_filename="model.pdiparams") + [prog, inputs, outputs] = paddle.static.load_inference_model( + path_prefix="pd_model_dygraph/inference_model/model", executor=exe) data = np.load("../dataset/mmdetection_retina/real_img_data_retina_50.npy") result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs) From 5be4aa99db28b07a18da43b1fe024809663b4556 Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 15:20:08 +0800 Subject: [PATCH 10/15] [Fix] onnx mmdetection_ssd --- test_benchmark/ONNX/mmdetection_ssd/pd_infer.py | 
8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/test_benchmark/ONNX/mmdetection_ssd/pd_infer.py b/test_benchmark/ONNX/mmdetection_ssd/pd_infer.py index 0f5d4eaf8..337aa97ff 100644 --- a/test_benchmark/ONNX/mmdetection_ssd/pd_infer.py +++ b/test_benchmark/ONNX/mmdetection_ssd/pd_infer.py @@ -1,5 +1,4 @@ import paddle -import paddle.fluid as fluid import numpy as np import pickle import sys @@ -10,11 +9,8 @@ paddle.enable_static() exe = paddle.static.Executor(paddle.CPUPlace()) # test Dygraph - [prog, inputs, outputs] = fluid.io.load_inference_model( - dirname="pd_model_dygraph/inference_model/", - executor=exe, - model_filename="model.pdmodel", - params_filename="model.pdiparams") + [prog, inputs, outputs] = paddle.static.load_inference_model( + path_prefix="pd_model_dygraph/inference_model/model", executor=exe) data = np.load("../dataset/mmdetection_ssd/real_img_data_ssd_50.npy") result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs) From a0b54d60590a41fe59c64e12ee244ca5d1a5e0c2 Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 15:21:24 +0800 Subject: [PATCH 11/15] [Fix] onnx mmdetection_yolov3 --- test_benchmark/ONNX/mmdetection_yolov3/pd_infer.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/test_benchmark/ONNX/mmdetection_yolov3/pd_infer.py b/test_benchmark/ONNX/mmdetection_yolov3/pd_infer.py index d1566552a..26a130d54 100644 --- a/test_benchmark/ONNX/mmdetection_yolov3/pd_infer.py +++ b/test_benchmark/ONNX/mmdetection_yolov3/pd_infer.py @@ -1,5 +1,4 @@ import paddle -import paddle.fluid as fluid import numpy as np import pickle import sys @@ -10,11 +9,8 @@ paddle.enable_static() exe = paddle.static.Executor(paddle.CPUPlace()) # test Dygraph - [prog, inputs, outputs] = fluid.io.load_inference_model( - dirname="pd_model_dygraph/inference_model/", - executor=exe, - model_filename="model.pdmodel", - params_filename="model.pdiparams") + [prog, inputs, outputs] = paddle.static.load_inference_model( + path_prefix="pd_model_dygraph/inference_model/model", executor=exe) data = np.load("../dataset/mmdetection_yolov3/real_img_data_yolo_50.npy") result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs) From 420fe4555eb93eab279b1a2b267e369280994181 Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 15:23:20 +0800 Subject: [PATCH 12/15] [Fix] onnx yolov5s --- test_benchmark/ONNX/yolov5s/pd_infer.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/test_benchmark/ONNX/yolov5s/pd_infer.py b/test_benchmark/ONNX/yolov5s/pd_infer.py index 211f378c8..2868d5278 100644 --- a/test_benchmark/ONNX/yolov5s/pd_infer.py +++ b/test_benchmark/ONNX/yolov5s/pd_infer.py @@ -1,4 +1,3 @@ -import paddle.fluid as fluid import paddle import numpy as np import sys @@ -10,11 +9,8 @@ exe = paddle.static.Executor(paddle.CPUPlace()) # test dygraph - [prog, inputs, outputs] = fluid.io.load_inference_model( - dirname="pd_model_dygraph/inference_model/", - executor=exe, - model_filename="model.pdmodel", - params_filename="model.pdiparams") + [prog, inputs, outputs] = paddle.static.load_inference_model( + path_prefix="pd_model_dygraph/inference_model/model", executor=exe) data = np.load('../dataset/yolov5s/input_0.npy') result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs) From 98884ef49e77b8c2192377ad69c70c25fc8d5384 Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 15:24:58 +0800 Subject: [PATCH 13/15] [Fix] onnx mobilefacenet --- test_benchmark/ONNX/mobilefacenet/pd_infer.py | 8 
++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/test_benchmark/ONNX/mobilefacenet/pd_infer.py b/test_benchmark/ONNX/mobilefacenet/pd_infer.py index a587101eb..48a2f2a79 100644 --- a/test_benchmark/ONNX/mobilefacenet/pd_infer.py +++ b/test_benchmark/ONNX/mobilefacenet/pd_infer.py @@ -1,5 +1,4 @@ import paddle -import paddle.fluid as fluid import numpy as np import pickle import sys @@ -11,11 +10,8 @@ exe = paddle.static.Executor(paddle.CPUPlace()) # test dygraph - [prog, inputs, outputs] = fluid.io.load_inference_model( - dirname="pd_model_dygraph/inference_model/", - executor=exe, - model_filename="model.pdmodel", - params_filename="model.pdiparams") + [prog, inputs, outputs] = paddle.static.load_inference_model( + path_prefix="pd_model_dygraph/inference_model/model", executor=exe) data = np.load('../dataset/mobilefacenet/input.npy') result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs) From 522308feff25ac4b165deaa544cf2575ef52797e Mon Sep 17 00:00:00 2001 From: megemini Date: Thu, 24 Oct 2024 21:30:02 +0800 Subject: [PATCH 14/15] [Update] black.list --- test_benchmark/ONNX/black.list | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/test_benchmark/ONNX/black.list b/test_benchmark/ONNX/black.list index 7588b41fd..23ec8bbda 100644 --- a/test_benchmark/ONNX/black.list +++ b/test_benchmark/ONNX/black.list @@ -1,15 +1,3 @@ -SwinTransformer -bert6 -mmdetection_fasterrcnn -mmdetection_fcos -mmdetection_fsaf -mmdetection_retina -mmdetection_ssd -mmdetection_yolov3 -mnasnet_a1 -mobilefacenet -mobilenet_v2 -yolov5s dataset tools output From fe0d4213bcf5727cd7b994877ead50fba7fe12cf Mon Sep 17 00:00:00 2001 From: megemini Date: Fri, 25 Oct 2024 22:20:48 +0800 Subject: [PATCH 15/15] [Fix] onnx paddle.flatten --- x2paddle/op_mapper/onnx2paddle/opset_legacy.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/x2paddle/op_mapper/onnx2paddle/opset_legacy.py b/x2paddle/op_mapper/onnx2paddle/opset_legacy.py index 8c5712240..a023d4ebf 100644 --- a/x2paddle/op_mapper/onnx2paddle/opset_legacy.py +++ b/x2paddle/op_mapper/onnx2paddle/opset_legacy.py @@ -1603,10 +1603,10 @@ def Flatten(self, node): else: # flatten + reshape self.paddle_graph.add_layer("paddle.flatten", - inputs={"input": val_x.name}, + inputs={"x": val_x.name}, outputs=[val_x.name + "_flatten"], - start_axis=[0], - stop_axis=[axis]) + start_axis=0, + stop_axis=-1) self.paddle_graph.add_layer( 'paddle.reshape', inputs={'x': val_x.name + "_flatten"},