diff --git a/mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/einsum_mapper.py b/mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/einsum_mapper.py
index 75eb3e46..a4b5531c 100644
--- a/mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/einsum_mapper.py
+++ b/mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/einsum_mapper.py
@@ -57,6 +57,9 @@ class EinSumMapper(ONNXToMindSporeMapper):
             return False
 
         equation_right = equation[1]
+        if len(equation_right) != 3:
+            return False
+
         pattern = ''.join([s for s in equation_left_list[0] if s in equation_left_list[1]])
         output_first = re.sub(pattern, '', equation_left_list[0])
         output_second = re.sub(pattern, '', equation_left_list[1])
@@ -78,15 +81,16 @@ class EinSumMapper(ONNXToMindSporeMapper):
 
     @staticmethod
     def _generate_snippet_template(**kwargs):
         op = kwargs.get("operation")
-        args = kwargs.get("converted_params")
+        args = kwargs.get("converted_params") if kwargs.get('raw_params') else kwargs['params']
         weights = kwargs.get("weights")
-        input_shape = kwargs["raw_params"]["input_shape"]
+        input_shape = \
+            kwargs["raw_params"]["input_shape"] if kwargs.get('raw_params') else kwargs['params']['input_shape']
         trainable_params = kwargs.get("trainable_params", dict())
         if not op:
             raise ValueError("Can not get MindSpore operation name.")
         variable_slot = "var_0"
-        init_template_list = [f"self.{{{variable_slot}}} = {op}()"]
+        init_template_list = [f"self.{{{variable_slot}}} = {op}({', '.join(['%s={%s}' % (p, p) for p in args])})"]
 
         default_shape = input_shape[:2]
         inputs_in_construct = [
diff --git a/mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/expand_dims_mapper.py b/mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/expand_dims_mapper.py
index ec81244d..34c4f1a5 100644
--- a/mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/expand_dims_mapper.py
+++ b/mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/expand_dims_mapper.py
@@ -74,12 +74,12 @@ class ExpandDimsMapper(ONNXToMindSporeMapper):
                 args["axis"] = axis
                 init_template = f"self.{{{variable_slot}}}_axis = {{axis}}"
                 construct_template = f"opt_{{{variable_slot}}} = self.{{{variable_slot}}}" \
-                                     f"({{{variable_slot}}}_{idx - 1}, self.{{{variable_slot}}}_axis)"
+                                     f"(opt_{{{variable_slot}}}_{idx - 1}, self.{{{variable_slot}}}_axis)"
             else:
                 args[f"axis_{idx}"] = axis
                 init_template = f"self.{{{variable_slot}}}_{idx}_axis = {{axis_{idx}}}"
                 construct_template = f"opt_{{{variable_slot}}}_{idx} = self.{{{variable_slot}}}" \
-                                     f"({{{variable_slot}}}_{idx - 1}, self.{{{variable_slot}}}_{idx}_axis)"
+                                     f"(opt_{{{variable_slot}}}_{idx - 1}, self.{{{variable_slot}}}_{idx}_axis)"
 
             init_template_list.append(init_template)
             construct_template_list.append(construct_template)
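Why the first hunk matters: EinSumMapper's snippet template can only emit code for the two-operand contraction shape exercised by the new 'bfnd, ndh -> bfh' test case, so equations whose output term is not exactly three letters (e.g. 'bfn, ndh -> bfdh') must be rejected before template generation. A minimal standalone sketch of that accept/reject logic follows; the helper name equation_supported is illustrative only and not part of the mapper's API.

# Sketch only: mirrors the mapper's idea (drop the letters shared by both
# input terms, keep the residue), simplified to plain string operations.
def equation_supported(equation: str) -> bool:
    sides = [s.strip() for s in equation.split('->')]
    if len(sides) != 2:
        return False
    left = [term.strip() for term in sides[0].split(',')]
    if len(left) != 2:
        return False
    right = sides[1]
    if len(right) != 3:  # the guard added by this patch
        return False
    shared = ''.join(s for s in left[0] if s in left[1])  # contracted axes
    residual = ''.join(s for s in left[0] + left[1] if s not in shared)
    return residual == right

assert equation_supported('bfnd, ndh -> bfh')      # accepted: batched matmul
assert not equation_supported('bfn, ndh -> bfdh')  # rejected: 4-letter output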
diff --git a/tests/ut/mindconverter/graph_based_converter/mapper/test_mapper.py b/tests/ut/mindconverter/graph_based_converter/mapper/test_mapper.py
index d55547bb..41891a36 100644
--- a/tests/ut/mindconverter/graph_based_converter/mapper/test_mapper.py
+++ b/tests/ut/mindconverter/graph_based_converter/mapper/test_mapper.py
@@ -17,6 +17,8 @@ import numpy as np
 import pytest
 
 from mindinsight.mindconverter.graph_based_converter.mapper.base import ONNXToMindSporeMapper
+from mindinsight.mindconverter.graph_based_converter.common.code_fragment import Fragment
+from mindinsight.mindconverter.graph_based_converter.third_party_graph.onnx_utils import NodeWeight
 
 
 class TestMappers:
@@ -26,82 +28,158 @@ class TestMappers:
         'input': {'op_name': 'onnx::Conv',
                   'params': {'dilations': [1, 1],
                              'group': 1,
+                             'kernel_shape': [3, 4],
                              'pads': [1, 2, 3, 4],
                              'strides': [1, 1]},
-                  'weights': {'weight': np.zeros((64, 3, 1, 1), dtype=np.int32)}},
-        'expected_output': {}
+                  'weights': [NodeWeight(weight_name='weight',
+                                         weight_location=1,
+                                         weight_value=np.zeros((64, 3, 1, 1), dtype=np.int32))]},
+        'expected_output': {'init_code': ['self.variable_name = nn.Conv2d(in_channels=3, out_channels=64, '
+                                          'kernel_size=(3, 4), stride=(1, 1), padding=(1, 3, 2, 4), '
+                                          'pad_mode=\"pad\", dilation=(1, 1), group=1, has_bias=False)'],
+                            'construct_code': ['opt_variable_name = self.variable_name(x)']}
     }, {
         'input': {'op_name': 'onnx::Conv',
                   'params': {'dilations': [1, 1],
                              'group': 1,
+                             'kernel_shape': [3, 4],
                              'pads': [0, 0, 0, 0],
                              'strides': [1, 1]},
-                  'weights': {'weight': np.zeros((64, 3, 2, 2), dtype=np.int32)}},
-        'expected_output': {}
+                  'weights': [NodeWeight(weight_name='weight',
+                                         weight_location=1,
+                                         weight_value=np.zeros((64, 3, 1, 1), dtype=np.int32)),
+                              NodeWeight(weight_name='bias',
+                                         weight_location=2,
+                                         weight_value=np.zeros((64, 1, 1), dtype=np.int32))]},
+        'expected_output': {'init_code': ['self.variable_name = nn.Conv2d(in_channels=3, out_channels=64, '
+                                          'kernel_size=(3, 4), stride=(1, 1), padding=0, '
+                                          'pad_mode=\"valid\", dilation=(1, 1), group=1, has_bias=True)'],
+                            'construct_code': ['opt_variable_name = self.variable_name(x)']}
     }, {
         'input': {'op_name': 'onnx::Gemm',
-                  'params': dict(),
-                  'weights': {'weight': np.zeros((10, 3), dtype=np.int32),
-                              'bias': np.zeros((10, 1), dtype=np.int32)}},
-        'expected_output': {}
+                  'params': {'alpha': 1.0,
+                             'beta': 1.0,
+                             'transA': 0,
+                             'transB': 0},
+                  'weights': [NodeWeight(weight_name='weight',
+                                         weight_location=1,
+                                         weight_value=np.zeros((10, 3), dtype=np.int32)),
+                              NodeWeight(weight_name='bias',
+                                         weight_location=2,
+                                         weight_value=np.zeros((10, 1), dtype=np.int32))]},
+        'expected_output': {'init_code': ['self.variable_name = nn.Dense(in_channels=3, out_channels=10, '
+                                          'has_bias=True)'],
+                            'construct_code': ['opt_variable_name = self.variable_name(x)']}
     }, {
         'input': {'op_name': 'onnx::BatchNormalization',
                   'params': {'epsilon': 1e-5,
                              'momentum': 0.9,
                              'output_shape': (1, 6, 224, 224)},
-                  'weights': dict()},
-        'expected_output': {}
+                  'weights': [NodeWeight(weight_name='scale',
+                                         weight_value=np.ones((6, 224, 224), dtype=np.int64),
+                                         weight_location=1),
+                              NodeWeight(weight_name='B',
+                                         weight_value=np.zeros((6, 224, 224), dtype=np.int64),
+                                         weight_location=2),
+                              NodeWeight(weight_name='mean',
+                                         weight_value=np.ones((6,), dtype=np.int64),
+                                         weight_location=3),
+                              NodeWeight(weight_name='var',
+                                         weight_value=np.ones((6,), dtype=np.int64),
+                                         weight_location=4)]},
+        'expected_output': {'init_code': ['self.variable_name = nn.BatchNorm2d(num_features=6, eps=1e-5, '
+                                          'momentum=0.9)'],
+                            'construct_code': ['opt_variable_name = self.variable_name(x)']}
     }, {
         'input': {'op_name': 'onnx::Relu',
                   'params': dict(),
-                  'weights': dict()},
-        'expected_output': {}
+                  'weights': []},
+        'expected_output': {'init_code': ['self.variable_name = nn.ReLU()'],
+                            'construct_code': ['opt_variable_name = self.variable_name(x)']}
     }, {
         'input': {'op_name': 'onnx::MaxPool',
-                  'params': {'kernel_shape': [3, 3],
-                             'pads': [1, 1, 1, 1],
+                  'params': {'auto_pad': 'NOTSET',
+                             'ceil_mode': 0,
+                             'dilations': [1, 1],
+                             'kernel_shape': [3, 3],
+                             'pads': [0, 0, 0, 0],
+                             'storage_order': 0,
                              'strides': [2, 2],
                              'input_shape': (1, 3, 224, 224),
                              'output_shape': (1, 3, 112, 112)},
-                  'weights': dict()},
-        'expected_output': {}
+                  'weights': []},
+        'expected_output': {'init_code': ['self.pad_variable_name = nn.Pad'
+                                          '(paddings=((0, 0), (0, 0), (0, 1), (0, 1)))',
+                                          'self.variable_name = nn.MaxPool2d'
+                                          '(kernel_size=(3, 3), stride=(2, 2))'],
+                            'construct_code': ['opt_variable_name = self.pad_variable_name(x)',
+                                               'opt_variable_name = self.variable_name(opt_variable_name)']}
     }, {
         'input': {'op_name': 'onnx::AveragePool',
-                  'params': {'kernel_shape': [3, 3],
-                             'pads': [1, 1, 1, 1],
+                  'params': {'auto_pad': 'NOTSET',
+                             'ceil_mode': 0,
+                             'dilations': [1, 1],
+                             'kernel_shape': [5, 5],
+                             'pads': [0, 0, 0, 0],
+                             'storage_order': 0,
                              'strides': [2, 2],
                              'input_shape': (1, 3, 224, 224),
                              'output_shape': (1, 3, 112, 112)},
-                  'weights': dict()},
-        'expected_output': {}
+                  'weights': []},
+        'expected_output': {'init_code': ['self.pad_variable_name = nn.Pad'
+                                          '(paddings=((0, 0), (0, 0), (1, 2), (1, 2)))',
+                                          'self.variable_name = nn.AvgPool2d'
+                                          '(kernel_size=(5, 5), stride=(2, 2))'],
+                            'construct_code': ['opt_variable_name = self.pad_variable_name(x)',
+                                               'opt_variable_name = self.variable_name(opt_variable_name)']}
     }, {
         'input': {'op_name': 'onnx::GlobalAveragePool',
                   'params': {'input_shape': (1, 3, 10, 10),
                              'output_shape': (1, 3, 1, 1)},
-                  'weights': ''},
-        'expected_output': {}
+                  'weights': []},
+        'expected_output': {'init_code': ['self.variable_name = nn.AvgPool2d(kernel_size=(10, 10))'],
+                            'construct_code': ['opt_variable_name = self.variable_name(x)']}
     }, {
         'input': {'op_name': 'onnx::Flatten',
                   'params': dict(),
-                  'weights': dict()},
-        'expected_output': {}
+                  'weights': []},
+        'expected_output': {'init_code': ['self.variable_name = nn.Flatten()'],
+                            'construct_code': ['opt_variable_name = self.variable_name(x)']}
     }, {
         'input': {'op_name': 'onnx::Add',
                   'params': dict(),
-                  'weights': dict()},
-        'expected_output': {}
+                  'weights': [NodeWeight(weight_name='bias',
+                                         weight_value=np.ones((1, 3, 224, 224), dtype=np.int64),
+                                         weight_location=0)]},
+        'expected_output': {'init_code': ['self.variable_name = P.Add()',
+                                          'self.variable_name_bias = Parameter'
+                                          '(Tensor(np.random.uniform(0, 1, (1, 3, 224, 224)).astype(np.int64)), '
+                                          'name=None)'],
+                            'construct_code': ['opt_variable_name = self.variable_name'
+                                               '(self.variable_name_bias, x)']}
+    }, {
+        'input': {'op_name': 'onnx::Add',
+                  'params': dict(),
+                  'weights': [NodeWeight(weight_name='bias',
+                                         weight_value=np.array(1, dtype=np.int64),
+                                         weight_location=0)]},
+        'expected_output': {'init_code': ['self.variable_name = P.Add()',
+                                          'self.variable_name_bias = 1'],
+                            'construct_code': ['opt_variable_name = self.variable_name'
+                                               '(self.variable_name_bias, x)']}
     }, {
         'input': {'op_name': 'onnx::Pad',
-                  'params': {'pads': [0, 1, 2, 3],
-                             'value': 0,
+                  'params': {'value': 0,
                              'mode': 'constant'},
-                  'weights': dict()},
+                  'weights': [NodeWeight(weight_name='pads',
+                                         weight_value=np.array([0, 1, 2, 3], dtype=np.int64),
+                                         weight_location=1)]},
         'expected_output': {}
     }, {
         'input': {'op_name': 'onnx::Pad',
                   'params': {'pads': [0, 1, 2, 3],
                              'mode': 'reflect'},
-                  'weights': dict()},
+                  'weights': []},
         'expected_output': {}
     }, {
         'input': {'op_name': 'onnx::Pad',
@@ -114,45 +192,216 @@ class TestMappers:
         'input': {'op_name': 'onnx::Pad',
                   'params': {'pads': [0, 1, 2, 3],
                              'mode': 'edge'},
-                  'weights': dict()},
+                  'weights': []},
         'expected_output': {}
     }, {
         'input': {'op_name': 'onnx::ReduceMean',
                   'params': {'keepdims': 0,
                              'axes': [1, 2]},
-                  'weights': dict()},
+                  'weights': []},
         'expected_output': {}
     }, {
         'input': {'op_name': 'onnx::ReduceMean',
                   'params': {'keepdims': 1,
                              'axes': [1]},
-                  'weights': dict()},
+                  'weights': []},
         'expected_output': {}
     }, {
         'input': {'op_name': 'onnx::Concat',
                   'params': {'axis': 0},
-                  'weights': dict()},
+                  'weights': []},
         'expected_output': {}
     }, {
         'input': {'op_name': 'onnx::Clip',
-                  'params': {'max': 6,
-                             'min': 0},
-                  'weights': dict()},
+                  'params': {},
+                  'weights': [NodeWeight(weight_name='min_clip',
+                                         weight_value=0,
+                                         weight_location=1),
+                              NodeWeight(weight_name='max_clip',
+                                         weight_value=6,
+                                         weight_location=2)]},
         'expected_output': {}
     }, {
         'input': {'op_name': 'onnx::Clip',
-                  'params': dict(),
-                  'weights': dict()},
+                  'params': {},
+                  'weights': [NodeWeight(weight_name='min_clip',
+                                         weight_value=2,
+                                         weight_location=1),
+                              NodeWeight(weight_name='max_clip',
+                                         weight_value=3,
+                                         weight_location=2)]},
         'expected_output': {}
     }, {
-        'input': {'op_name': 'onnx::Clip',
-                  'params': {'max': 3,
-                             'min': 2},
-                  'weights': dict()},
+        'input': {'op_name': 'onnx::MatMul',
+                  'params': {},
+                  'weights': [NodeWeight(weight_name='w',
+                                         weight_value=np.ones((1, 3, 224, 224), dtype=np.int64),
+                                         weight_location=1)]},
         'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Sigmoid',
+                  'params': {},
+                  'weights': []},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Softmax',
+                  'params': {'axis': 0.6},
+                  'weights': []},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Tanh',
+                  'params': {},
+                  'weights': []},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Cast',
+                  'params': {'to': 3},
+                  'weights': []},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Div',
+                  'params': {},
+                  'weights': [NodeWeight(weight_name='weight',
+                                         weight_value=np.ones((1, 3, 224, 224), dtype=np.int64),
+                                         weight_location=0)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Div',
+                  'params': {},
+                  'weights': [NodeWeight(weight_name='weight',
+                                         weight_value=np.array(1, dtype=np.int64),
+                                         weight_location=1)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Einsum',
+                  'params': {'equation': 'bfnd, ndh -> bfh',
+                             'input_shape': (1, 3, 224, 224)},
+                  'weights': [NodeWeight(weight_name='weight',
+                                         weight_value=np.ones((224, 224, 512), dtype=np.int64),
+                                         weight_location=1)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Einsum',
+                  'params': {'equation': 'bfn, ndh -> bfdh',
+                             'input_shape': (1, 2, 3)},
+                  'weights': [NodeWeight(weight_name='weight',
+                                         weight_value=np.ones((3, 4, 5), dtype=np.int64),
+                                         weight_location=1)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Erf',
+                  'params': {},
+                  'weights': []},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Unsqueeze',
+                  'params': {'axes': [1]},
+                  'weights': []},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Unsqueeze',
+                  'params': {},
+                  'weights': [NodeWeight(weight_name='axes',
+                                         weight_value=np.array([3, 1, 2], dtype=int),
+                                         weight_location=1)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Gather',
+                  'params': {'axis': 2},
+                  'weights': [NodeWeight(weight_name='inputs',
+                                         weight_value=np.ones((1, 2, 3), dtype=np.int64),
+                                         weight_location=0)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Gather',
+                  'params': {'axis': 2},
+                  'weights': [NodeWeight(weight_name='inputs',
+                                         weight_value=np.array(2, dtype=np.int64),
+                                         weight_location=1)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Mul',
+                  'params': {},
+                  'weights': [NodeWeight(weight_name='inputs',
+                                         weight_value=np.ones((1, 2, 3), dtype=np.int64),
+                                         weight_location=0)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Mul',
+                  'params': {},
+                  'weights': [NodeWeight(weight_name='inputs',
+                                         weight_value=np.array(1, dtype=np.int64),
+                                         weight_location=1)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Pow',
+                  'params': {},
+                  'weights': [NodeWeight(weight_name='inputs',
+                                         weight_value=np.ones((1, 2, 3), dtype=np.int64),
+                                         weight_location=0)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Pow',
+                  'params': {},
+                  'weights': [NodeWeight(weight_name='inputs',
+                                         weight_value=np.array(1, dtype=np.int64),
+                                         weight_location=1)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Reshape',
+                  'params': {'output_shape': (1, 2, 3)},
+                  'weights': []},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Resize',
+                  'params': {'mode': 'linear',
+                             'output_shape': [2, 3]},
+                  'weights': []},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Slice',
+                  'params': {'input_shape': (4, 5, 6)},
+                  'weights': [NodeWeight(weight_name='starts',
+                                         weight_value=np.array([0, 1, 0], dtype=np.int64),
+                                         weight_location=1),
+                              NodeWeight(weight_name='ends',
+                                         weight_value=np.array([3, 4, 5], dtype=np.int64),
+                                         weight_location=2),
+                              NodeWeight(weight_name='axes',
+                                         weight_value=np.array([0, 2, 1], dtype=np.int64),
+                                         weight_location=3),
+                              NodeWeight(weight_name='steps',
+                                         weight_value=np.array([1, 2, 3], dtype=np.int64),
+                                         weight_location=4)]},
+        'expected_output': {}
+    }, {
+        'input': {'op_name': 'onnx::Slice',
+                  'params': {'input_shape': (4, 5, 6)},
+                  'weights': [NodeWeight(weight_name='starts',
+                                         weight_value=np.array([1], dtype=np.int64),
+                                         weight_location=1),
+                              NodeWeight(weight_name='ends',
+                                         weight_value=np.array([4], dtype=np.int64),
+                                         weight_location=2),
+                              NodeWeight(weight_name='axes',
+                                         weight_value=np.array([1], dtype=np.int64),
+                                         weight_location=3),
+                              NodeWeight(weight_name='steps',
+                                         weight_value=np.array([2], dtype=np.int64),
+                                         weight_location=4)]},
+        'expected_output': {'init_code': [],
+                            'construct_code': []}
     }])
     def test_mapper(self, params):
         """Test mapper function."""
-        _, _, _, _ = ONNXToMindSporeMapper.convert(params['input']['op_name'],
-                                                   params['input']['params'],
-                                                   params['input']['weights'])
+        expected_output = params['expected_output']
+        template, exchange_msg, outputs_lists, outputs_mapping = \
+            ONNXToMindSporeMapper.convert(params['input']['op_name'],
+                                          params['input']['params'],
+                                          params['input']['weights'])
+
+        exchange_msg['var_0']['variable_name'] = 'variable_name'
+        exchange_msg['var_0']['inputs'] = ['x']
+
+        fragment = Fragment(data_entity=exchange_msg, code_template=template, outputs=outputs_lists,
+                            outputs_mapping=outputs_mapping)
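The excerpt ends at the Fragment construction, so the assertions that compare the generated code against `expected_output` are not visible here. Assuming the fragment yields the rendered statement lists when called (a `fragment()` accessor returning 'init' and 'construct' keys is an assumption about Fragment's interface, not something this diff shows), the remainder of the test could look like:

# Hedged sketch: the fragment() accessor and its result keys are assumed,
# not confirmed by the diff above.
if expected_output:
    code = fragment()
    assert code['init'] == expected_output['init_code']
    assert code['construct'] == expected_output['construct_code']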