Browse Source

fix `README` & Add UT & Optimize

tags/v1.1.0
moran 5 years ago
parent
commit
2f587fd511
15 changed files with 348 additions and 15 deletions
  1. +1
    -1
      mindinsight/mindconverter/README.md
  2. +1
    -1
      mindinsight/mindconverter/README_CN.md
  3. +1
    -1
      mindinsight/mindconverter/cli.py
  4. +2
    -2
      mindinsight/mindconverter/graph_based_converter/framework.py
  5. +3
    -0
      mindinsight/mindconverter/graph_based_converter/hierarchical_tree/hierarchical_tree.py
  6. +6
    -1
      mindinsight/mindconverter/graph_based_converter/mapper/impl/nn/conv_mapper.py
  7. +50
    -0
      mindinsight/mindconverter/graph_based_converter/mapper/impl/nn/pad_mapper.py
  8. +2
    -1
      mindinsight/mindconverter/graph_based_converter/mapper/onnx_to_ms.json
  9. +11
    -0
      mindinsight/mindconverter/graph_based_converter/third_party_graph/base.py
  10. +11
    -0
      mindinsight/mindconverter/graph_based_converter/third_party_graph/pytorch_graph_node.py
  11. +2
    -1
      tests/st/func/mindconverter/test_converter.py
  12. +17
    -0
      tests/ut/mindconverter/graph_based_converter/conftest.py
  13. +140
    -7
      tests/ut/mindconverter/graph_based_converter/hierarchical_tree/test_hierarchical_tree.py
  14. +15
    -0
      tests/ut/mindconverter/graph_based_converter/mapper/__init__.py
  15. +86
    -0
      tests/ut/mindconverter/graph_based_converter/mapper/test_mapper.py

+ 1
- 1
mindinsight/mindconverter/README.md View File

@@ -49,7 +49,7 @@ optional arguments:
to do script generation. When `--in_file` and
`--model_file` are both provided, use AST schema as
default.
--shape SHAPE Optional, excepted input tensor shape of
--shape SHAPE Optional, expected input tensor shape of
`--model_file`. It is required when use graph based
schema. Usage: --shape 3,244,244
--output OUTPUT Optional, specify path for converted script file


+ 1
- 1
mindinsight/mindconverter/README_CN.md View File

@@ -45,7 +45,7 @@ optional arguments:
to do script generation. When `--in_file` and
`--model_file` are both provided, use AST schema as
default.
--shape SHAPE Optional, excepted input tensor shape of
--shape SHAPE Optional, expected input tensor shape of
`--model_file`. It is required when use graph based
schema. Usage: --shape 3,244,244
--output OUTPUT Optional, specify path for converted script file


+ 1
- 1
mindinsight/mindconverter/cli.py View File

@@ -247,7 +247,7 @@ parser.add_argument(
default=None,
required=False,
help="""
Optional, excepted input tensor shape of
Optional, expected input tensor shape of
`--model_file`. It's required when use graph based
schema.
Usage: --shape 3,244,244


+ 2
- 2
mindinsight/mindconverter/graph_based_converter/framework.py View File

@@ -20,8 +20,8 @@ from importlib.util import find_spec

import mindinsight
from mindinsight.mindconverter.common.log import logger as log
from .mapper import ONNXToMindSporeMapper
from ..common.exceptions import NodeTypeNotSupport
from mindinsight.mindconverter.graph_based_converter.mapper import ONNXToMindSporeMapper
from mindinsight.mindconverter.common.exceptions import NodeTypeNotSupport

permissions = os.R_OK | os.W_OK | os.X_OK
os.umask(permissions << 3 | permissions)


+ 3
- 0
mindinsight/mindconverter/graph_based_converter/hierarchical_tree/hierarchical_tree.py View File

@@ -372,6 +372,9 @@ class HierarchicalTree(Tree):
"""
nonlocal node

if node.predecessor(self.tree_identifier) is None:
return False

tgt_type = {NodeType.MODULE.value,
NodeType.FUNC.value, NodeType.CLASS.value}
md_type_lst = [self.get_node(child).data.node_type


+ 6
- 1
mindinsight/mindconverter/graph_based_converter/mapper/impl/nn/conv_mapper.py View File

@@ -67,4 +67,9 @@ class ConvMapper(ONNXToMindSporeMapper):
def _convert_padding(params):
if sum(params['pads']) == 0:
return '\"valid\"', 0
return '\"pad\"', tuple(params['pads'])
pads_onnx = params['pads']
half_index = len(pads_onnx) // 2
padding = []
for num_begin, num_end in zip(pads_onnx[:half_index], pads_onnx[half_index:]):
padding += [num_begin, num_end]
return '\"pad\"', tuple(padding)

+ 50
- 0
mindinsight/mindconverter/graph_based_converter/mapper/impl/nn/pad_mapper.py View File

@@ -0,0 +1,50 @@
# Copyright 2020 Huawei Technologies Co., Ltd.All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Mapper module."""
from ...base import ONNXToMindSporeMapper


class PadMapper(ONNXToMindSporeMapper):
    """Mapper from the ONNX Pad operator to MindSpore nn.Pad."""

    @staticmethod
    def _operation_name_in_ms(*args, **kwargs):
        """Return the MindSpore operation name for ONNX Pad."""
        return "nn.Pad"

    @staticmethod
    def _convert_params(params, weights):
        """
        Convert ONNX Pad attributes into nn.Pad keyword arguments.

        Args:
            params (dict): ONNX node attributes; expects 'mode', 'pads' and,
                for constant mode, 'value'.
            weights (dict): Unused here; Pad carries no trained weights.

        Returns:
            dict, keyword arguments 'paddings' and 'mode' for nn.Pad.
        """
        onnx_mode = params['mode']
        if onnx_mode == 'constant':
            if params['value'] == 0:
                mode = '\"CONSTANT\"'
            else:
                # nn.Pad CONSTANT mode only pads with zeros, so flag any
                # non-zero padding value in the generated code.
                mode = "[NOT support value is NOT 0]\"CONSTANT\""
        elif onnx_mode == 'reflect':
            mode = '\"REFLECT\"'
        else:
            mode = f"[NOT support {onnx_mode}]\"UNKNOWN\""
        # ONNX lays pads out as [x1_begin, x2_begin, ..., x1_end, x2_end, ...]
        # while MindSpore expects per-dimension (begin, end) pairs.
        pads_onnx = params['pads']
        half_index = len(pads_onnx) // 2
        paddings = tuple(zip(pads_onnx[:half_index], pads_onnx[half_index:]))
        return {'paddings': paddings,
                'mode': mode}

    @staticmethod
    def _convert_trained_weights(weights):
        """Pad has no trainable weights, so nothing needs converting."""
        return dict()

+ 2
- 1
mindinsight/mindconverter/graph_based_converter/mapper/onnx_to_ms.json View File

@@ -7,5 +7,6 @@
"onnx::AveragePool": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.pool_mapper.PoolMapper",
"onnx::GlobalAveragePool": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.global_pool_mapper.GlobalPoolMapper",
"onnx::Flatten": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.flatten_mapper.FlattenMapper",
"onnx::Add": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.add_mapper.AddMapper"
"onnx::Add": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.add_mapper.AddMapper",
"onnx::Pad": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.pad_mapper.PadMapper"
}

+ 11
- 0
mindinsight/mindconverter/graph_based_converter/third_party_graph/base.py View File

@@ -429,6 +429,17 @@ class GraphNode(abc.ABC):
"""
return self._scope_name

@scope_name.setter
def scope_name(self, name):
"""
Setter of scope name.

Args:
name(str): Scope name.

"""
self._scope_name = name

@property
def node_params(self):
"""Get node params (ONNX op params)."""


+ 11
- 0
mindinsight/mindconverter/graph_based_converter/third_party_graph/pytorch_graph_node.py View File

@@ -144,6 +144,17 @@ class PyTorchGraphNode(GraphNode):
"""
return self._op_name

@op_name.setter
def op_name(self, name):
"""
Setter of op name.

Args:
name(str): op_name.

"""
self._op_name = name

@property
def real_name(self):
return


+ 2
- 1
tests/st/func/mindconverter/test_converter.py View File

@@ -14,7 +14,7 @@
# ============================================================================

"""
Fuction:
Function:
Test mindconverter to convert user's PyTorch network script.
Usage:
pytest tests/st/func/mindconverter
@@ -36,6 +36,7 @@ class TestConverter:
def setup_class(cls):
"""Setup method."""
cls.script_dir = os.path.join(os.path.dirname(__file__), 'data')
cls.pytorch_dir = '/home/test/mindinsight_sample'
sys.path.insert(0, cls.script_dir)

@classmethod


+ 17
- 0
tests/ut/mindconverter/graph_based_converter/conftest.py View File

@@ -0,0 +1,17 @@
# Copyright 2020 Huawei Technologies Co., Ltd.All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Constant definition in unit tests."""

TEST_BASE_PATH = 'mindinsight.mindconverter.graph_based_converter'

+ 140
- 7
tests/ut/mindconverter/graph_based_converter/hierarchical_tree/test_hierarchical_tree.py View File

@@ -12,27 +12,160 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Test Name manager module."""
from unittest import mock, TestCase
"""Test hierarchical tree module."""
import os
import shutil
from unittest import mock

import pytest

from mindinsight.mindconverter.graph_based_converter.hierarchical_tree.hierarchical_tree import HierarchicalTree
from mindinsight.mindconverter.graph_based_converter.third_party_graph.pytorch_graph_node import PyTorchGraphNode
from mindinsight.mindconverter.graph_based_converter.mapper.base import ONNXToMindSporeMapper
from mindinsight.mindconverter.graph_based_converter.constant import NodeType

from tests.ut.mindconverter.graph_based_converter.conftest import TEST_BASE_PATH

class TestHierarchicalTree(TestCase):

class TestHierarchicalTree:
"""Test the class of HierarchicalTree."""

def test_tree_identifier(self):
"""Test tree_identifier"""
tree = HierarchicalTree()
self.assertIsInstance(tree.tree_identifier, str)
assert isinstance(tree.tree_identifier, str)

@mock.patch(
'mindinsight.mindconverter.graph_based_converter.' \
'third_party_graph.pytorch_graph_node.PyTorchGraphNode._get_raw_params')
'.'.join((TEST_BASE_PATH, 'third_party_graph.pytorch_graph_node.PyTorchGraphNode._get_raw_params')))
def test_insert(self, get_raw_params):
"""Test insert"""
get_raw_params.return_value = []
tree = HierarchicalTree()
pt_node = PyTorchGraphNode()
tree.insert(pt_node, 'ResNet', (1, 3, 224, 224), (1, 64, 112, 112))
self.assertEqual(tree.root, 'ResNet')
assert tree.root == 'ResNet'

def test_remove(self):
    """Test remove function."""
    # Build a tree whose only node is the root.
    tree = HierarchicalTree()
    tree.create_node(
        tag='node_root',
        identifier='root',
        parent=None,
        data=None
    )
    node = tree.get_node('root')
    tree.remove(node)
    # Removing the sole node should leave the tree empty (no root).
    assert tree.root is None

@mock.patch(
    '.'.join((TEST_BASE_PATH, 'third_party_graph.pytorch_graph_node.PyTorchGraphNode._get_raw_params')))
def test_shrink(self, get_raw_params):
    """Test shrink function."""
    # Three-level chain: root -> child0 -> child1; node params are empty.
    params = {'root': {},
              'root/child0': {},
              'root/child0/child1': {}}
    tree = self._create_tree(get_raw_params=get_raw_params, params=params)
    node = tree.get_node('root/child0')
    tree.shrink(node)
    # After shrinking the middle node, 'child1' should become a direct leaf.
    assert tree.leaves()[0].tag == 'child1'

@pytest.mark.parametrize('params', [{
    # Case 1: conversion report written to a separate report folder.
    'tree_params': {'root': {'op_name': 'Root',
                             'precursor_nodes': [],
                             'successor_nodes': ['root/relu'],
                             'node_type': NodeType.MODULE.value,
                             'input_shape': [1, 3, 224, 224],
                             'output_shape': [1, 1, 224, 224]},
                    'root/relu': {'op_name': 'onnx::Relu',
                                  'precursor_nodes': ['root'],
                                  'successor_nodes': ['root/unknown'],
                                  'node_type': NodeType.OPERATION.value,
                                  'input_shape': [1, 3, 224, 224],
                                  'output_shape': [1, 3, 224, 224]},
                    'root/unknown': {'op_name': 'onnx::Unknown',
                                     'precursor_nodes': ['root/relu'],
                                     'successor_nodes': [],
                                     'node_type': NodeType.OPERATION.value,
                                     'input_shape': [1, 3, 224, 224],
                                     'output_shape': [1, 1, 224, 224]}},
    'report_dir': 'report_folder'
}, {
    # Case 2: no report folder given; report defaults to the output folder.
    'tree_params': {'root': {'op_name': 'Root',
                             'precursor_nodes': [],
                             'successor_nodes': ['root/relu'],
                             'node_type': NodeType.MODULE.value,
                             'input_shape': [1, 3, 224, 224],
                             'output_shape': [1, 1, 224, 224]},
                    'root/relu': {'op_name': 'onnx::Relu',
                                  'precursor_nodes': ['root'],
                                  'successor_nodes': ['root/unknown'],
                                  'node_type': NodeType.OPERATION.value,
                                  'input_shape': [1, 3, 224, 224],
                                  'output_shape': [1, 3, 224, 224]},
                    'root/unknown': {'op_name': 'onnx::Unknown',
                                     'precursor_nodes': ['root/relu'],
                                     'successor_nodes': [],
                                     'node_type': NodeType.OPERATION.value,
                                     'input_shape': [1, 3, 224, 224],
                                     'output_shape': [1, 1, 224, 224]}},
    'report_dir': None
}])
@mock.patch(
    '.'.join((TEST_BASE_PATH, 'third_party_graph.pytorch_graph_node.PyTorchGraphNode._get_raw_params')))
def test_save_source_file(self, get_raw_params, params):
    """Test save_source_file function."""
    tree_params = params['tree_params']
    out_folder = 'out_folder'
    report_folder = params['report_dir']
    model_name = 'model_name'
    mapper = ONNXToMindSporeMapper()

    tree = self._create_tree(get_raw_params=get_raw_params, params=tree_params)
    tree.save_source_files(out_folder, mapper, model_name, report_folder)

    out_path = os.path.realpath(os.path.join(out_folder, f"{model_name}.py"))
    # With no explicit report folder the report lands next to the script.
    report_folder_test = report_folder if report_folder else out_folder
    report_path = os.path.realpath(
        os.path.join(report_folder_test, f"report_of_{model_name}.txt"))
    try:
        assert os.path.exists(out_path)
        assert os.path.exists(report_path)
        with open(out_path, 'r') as out_r:
            code = out_r.read()
        # Relu converts; the unsupported op is emitted verbatim.
        assert 'nn.ReLU' in code
        assert 'onnx.Unknown' in code
        with open(report_path, 'r') as report_r:
            report = report_r.read()
        # One of two operations failed to convert -> 50.00% rate.
        assert "[UnConvert] 'onnx::Unknown' didn't convert." in report
        assert "Converted Rate: 50.00%." in report
    finally:
        # Always clean up generated folders, even on assertion failure.
        shutil.rmtree(out_folder)
        if report_folder:
            shutil.rmtree(report_folder)

@staticmethod
def _create_tree(get_raw_params, params):
    """Create tree.

    Builds a HierarchicalTree from a mapping of node identifier
    ('root/child/...') to node attributes; `get_raw_params` is the mocked
    PyTorchGraphNode._get_raw_params whose return value is set per node.
    """
    tree = HierarchicalTree()
    for key, val in params.items():
        input_shape = val['input_shape'] if val.get('input_shape') else []
        output_shape = val['output_shape'] if val.get('output_shape') else []
        # Drive the mocked _get_raw_params so node construction sees these.
        get_raw_params.return_value = val['op_params'] if val.get('op_params') else dict()
        weight = val['weight'] if val.get('weight') else None

        node = PyTorchGraphNode(weight=weight)
        node.add_input_and_output_shape(input_shape, output_shape)
        # The tag is the last path segment; the identifier keeps the full path.
        node.tag = key.split('/')[-1] if len(key.split('/')) > 1 else key
        node.op_name = val['op_name'] if val.get('op_name') else None
        node.precursor_nodes = val['precursor_nodes'] if val.get('precursor_nodes') else []
        node.successor_nodes = val['successor_nodes'] if val.get('successor_nodes') else []
        node.node_type = val['node_type'] if val.get('node_type') else None

        tree.create_node(
            tag=node.tag,
            identifier=key,
            # Parent is the path with the last segment stripped; roots have none.
            parent='/'.join(key.split('/')[:-1]) if len(key.split('/')) > 1 else None,
            data=node
        )
    return tree

+ 15
- 0
tests/ut/mindconverter/graph_based_converter/mapper/__init__.py View File

@@ -0,0 +1,15 @@
# Copyright 2020 Huawei Technologies Co., Ltd.All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit test for mindconvert.graph_based_converter.mapper interface."""

+ 86
- 0
tests/ut/mindconverter/graph_based_converter/mapper/test_mapper.py View File

@@ -0,0 +1,86 @@
# Copyright 2020 Huawei Technologies Co., Ltd.All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test all operator mappers on transformation from pytorch to mindspore."""

import pytest

from mindinsight.mindconverter.graph_based_converter.mapper.base import ONNXToMindSporeMapper


class TestMappers:
    """Verify that each supported ONNX operator maps to the expected MindSpore operator and arguments."""

    @pytest.mark.parametrize('params', [
        {'input': {'op_name': 'onnx::BatchNormalization',
                   'params': {'epsilon': 1e-5,
                              'momentum': 0.9,
                              'output_shape': (1, 6, 224, 224)},
                   'weights': {}},
         'expected_output': {'converter_name': 'nn.BatchNorm2d',
                             'converted_params': {'num_features': 6,
                                                  'eps': 1e-5,
                                                  'momentum': 0.9}}},
        {'input': {'op_name': 'onnx::Relu', 'params': {}, 'weights': {}},
         'expected_output': {'converter_name': 'nn.ReLU',
                             'converted_params': {}}},
        {'input': {'op_name': 'onnx::MaxPool',
                   'params': {'kernel_shape': [3, 3],
                              'pads': [1, 1, 1, 1],
                              'strides': [2, 2]},
                   'weights': {}},
         'expected_output': {'converter_name': 'nn.MaxPool2d',
                             'converted_params': {'kernel_size': (3, 3),
                                                  'stride': (2, 2),
                                                  'pad_mode': '"same"'}}},
        {'input': {'op_name': 'onnx::AveragePool',
                   'params': {'kernel_shape': [3, 3],
                              'pads': [1, 1, 1, 1],
                              'strides': [2, 2]},
                   'weights': {}},
         'expected_output': {'converter_name': 'nn.AvgPool2d',
                             'converted_params': {'kernel_size': (3, 3),
                                                  'stride': (2, 2),
                                                  'pad_mode': '"same"'}}},
        {'input': {'op_name': 'onnx::GlobalAveragePool',
                   'params': {'input_shape': (1, 3, 10, 10),
                              'output_shape': (1, 3, 1, 1)},
                   'weights': ''},
         'expected_output': {'converter_name': 'nn.AvgPool2d',
                             'converted_params': {'kernel_size': (10, 10)}}},
        {'input': {'op_name': 'onnx::Flatten', 'params': {}, 'weights': {}},
         'expected_output': {'converter_name': 'nn.Flatten',
                             'converted_params': {}}},
        {'input': {'op_name': 'onnx::Add', 'params': {}, 'weights': {}},
         'expected_output': {'converter_name': 'P.TensorAdd',
                             'converted_params': {}}},
    ])
    def test_mapper(self, params):
        """Converting an ONNX op must yield the expected name and params."""
        op_input = params['input']
        expected = params['expected_output']
        converter_name, converted_params = ONNXToMindSporeMapper().convert(
            op_input['op_name'], op_input['params'], op_input['weights'])
        assert converter_name == expected['converter_name']
        assert converted_params == expected['converted_params']

Loading…
Cancel
Save