Browse Source

Add mappers for the Mul and Sigmoid operators, and extract the code-saving logic into a single function.

tags/v1.1.0
liuchongming 5 years ago
parent
commit
68897adf67
8 changed files with 143 additions and 51 deletions
  1. +47
    -1
      mindinsight/mindconverter/graph_based_converter/common/utils.py
  2. +2
    -32
      mindinsight/mindconverter/graph_based_converter/hierarchical_tree/hierarchical_tree.py
  3. +37
    -0
      mindinsight/mindconverter/graph_based_converter/mapper/impl/nn/sigmoid_mapper.py
  4. +42
    -0
      mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/mul_mapper.py
  5. +3
    -1
      mindinsight/mindconverter/graph_based_converter/mapper/onnx_to_ms.json
  6. +3
    -6
      mindinsight/mindconverter/graph_based_converter/third_party_graph/onnx_graph.py
  7. +4
    -0
      mindinsight/mindconverter/graph_based_converter/third_party_graph/onnx_graph_node.py
  8. +5
    -11
      mindinsight/mindconverter/graph_based_converter/third_party_graph/onnx_utils.py

+ 47
- 1
mindinsight/mindconverter/graph_based_converter/common/utils.py View File

@@ -13,9 +13,12 @@
# limitations under the License.
# ============================================================================
"""Define common utils."""
import os
import stat
from importlib import import_module
from typing import List
from typing import List, Tuple, Mapping

from mindinsight.mindconverter.common.log import logger as log
from mindinsight.mindconverter.graph_based_converter.constant import SEPARATOR_IN_ONNX_OP


@@ -63,3 +66,46 @@ def fetch_output_from_onnx_model(model, feed_dict: dict, output_nodes: List[str]
for idx, opt in enumerate(output_nodes):
run_result[opt] = fetched_res[idx]
return run_result


def save_code_file_and_report(model_name: str, code_lines: Mapping[str, Tuple],
                              out_folder: str, report_folder: str):
    """
    Save the generated code file and its conversion report.

    Args:
        model_name (str): Model name; used as the stem of both output files
            (``<model_name>.py`` and ``report_of_<model_name>.txt``).
        code_lines (Mapping[str, Tuple]): Mapping from file name to a
            ``(code, report)`` pair of strings.
        out_folder (str): Output folder for the generated ``.py`` file;
            created (user rwx) if it does not exist.
        report_folder (str): Report output folder; falls back to
            ``out_folder`` when empty or None.

    Raises:
        IOError: If either file cannot be created or written. Files are
            opened with ``O_EXCL``, so an already-existing target file also
            raises (``FileExistsError`` is a subclass of ``OSError``/
            ``IOError`` and is therefore caught, logged, and re-raised).
    """
    flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
    modes = stat.S_IRUSR | stat.S_IWUSR
    modes_usr = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR

    out_folder = os.path.realpath(out_folder)
    report_folder = os.path.realpath(report_folder) if report_folder else out_folder

    if not os.path.exists(out_folder):
        os.makedirs(out_folder, modes_usr)
    if not os.path.exists(report_folder):
        os.makedirs(report_folder, modes_usr)

    # Output paths do not depend on the loop variable; build them once.
    # NOTE(review): the mapping key is never used for naming -- every entry
    # is written to <model_name>.py, so with more than one entry the second
    # iteration fails on O_EXCL. Confirm whether code_lines can ever hold
    # more than one entry.
    code_path = os.path.realpath(os.path.join(out_folder, f"{model_name}.py"))
    report_path = os.path.realpath(os.path.join(report_folder,
                                                f"report_of_{model_name}.txt"))
    for _, (code, report) in code_lines.items():
        try:
            with os.fdopen(os.open(code_path, flags, modes), 'w') as file:
                file.write(code)
            # Report is created read-only for the owner.
            with os.fdopen(os.open(report_path, flags, stat.S_IRUSR), "w") as rpt_f:
                rpt_f.write(report)
        except IOError as error:
            log.error(str(error))
            log.exception(error)
            raise error

+ 2
- 32
mindinsight/mindconverter/graph_based_converter/hierarchical_tree/hierarchical_tree.py View File

@@ -25,7 +25,7 @@ from treelib import Tree, Node
from mindinsight.mindconverter.common.log import logger as log

from .name_mgr import ModuleNameMgr, GlobalVarNameMgr
from ..common.utils import is_converted
from ..common.utils import is_converted, save_code_file_and_report
from ..mapper.base import Mapper
from ..third_party_graph.pytorch_graph_node import PyTorchGraphNode
from ..third_party_graph.onnx_graph_node import OnnxGraphNode
@@ -193,37 +193,7 @@ class HierarchicalTree(Tree):
log.error("Error occur when generating codes.")
raise e

out_folder = os.path.realpath(out_folder)
if not report_folder:
report_folder = out_folder
else:
report_folder = os.path.realpath(report_folder)

if not os.path.exists(out_folder):
os.makedirs(out_folder, self.modes_usr)
if not os.path.exists(report_folder):
os.makedirs(report_folder, self.modes_usr)

for file_name in code_fragments:
code, report = code_fragments[file_name]
try:
with os.fdopen(os.open(os.path.realpath(os.path.join(out_folder, f"{model_name}.py")),
self.flags, self.modes), 'w') as file:
file.write(code)
except IOError as error:
log.error(str(error))
log.exception(error)
raise error

try:
with os.fdopen(os.open(os.path.realpath(os.path.join(report_folder,
f"report_of_{model_name}.txt")),
self.flags, stat.S_IRUSR), "w") as rpt_f:
rpt_f.write(report)
except IOError as error:
log.error(str(error))
log.exception(error)
raise error
save_code_file_and_report(model_name, code_fragments, out_folder, report_folder)

def _preprocess_node_args(self, node, module_key):
"""


+ 37
- 0
mindinsight/mindconverter/graph_based_converter/mapper/impl/nn/sigmoid_mapper.py View File

@@ -0,0 +1,37 @@
# Copyright 2020 Huawei Technologies Co., Ltd.All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Mapper module."""
from ...base import ONNXToMindSporeMapper
from ...gen_setting import Setting


class SigmoidMapper(ONNXToMindSporeMapper):
    """Map onnx::Sigmoid to MindSpore nn.Sigmoid."""

    @staticmethod
    def _operation_name_in_ms(*args, **kwargs):
        # The target operator name is fixed; nothing is derived from the node.
        return "nn.Sigmoid"

    @staticmethod
    def _convert_params(**kwargs):
        # nn.Sigmoid takes no constructor parameters.
        return {}

    @staticmethod
    def _convert_trained_weights(**kwargs):
        # A sigmoid activation carries no trainable weights.
        return {}

    @staticmethod
    def _convert_settings(**kwargs):
        # Default (empty) settings suffice for this operator.
        return Setting()

+ 42
- 0
mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/mul_mapper.py View File

@@ -0,0 +1,42 @@
# Copyright 2020 Huawei Technologies Co., Ltd.All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Mapper module."""
from ...base import ONNXToMindSporeMapper
from ...gen_setting import Setting, Tensor, get_dtype


class MulMapper(ONNXToMindSporeMapper):
    """Map onnx::Mul to MindSpore P.Mul."""

    @staticmethod
    def _operation_name_in_ms(*args, **kwargs):
        # The target operator name is fixed; nothing is derived from the node.
        return "P.Mul"

    @staticmethod
    def _convert_params(**kwargs):
        # P.Mul takes no constructor parameters.
        return {}

    @staticmethod
    def _convert_trained_weights(**kwargs):
        # Constant operands are handled via settings, not as trained weights.
        return {}

    @staticmethod
    def _convert_settings(**kwargs):
        # When the node carries a constant operand, expose the first weight
        # as an extra tensor so the code generator can emit it as an input.
        weights = kwargs.get("weights")
        if not weights:
            return Setting()
        ref, tensor = next(iter(weights.items()))
        extra = Tensor(shape=tensor.shape,
                       dtype=get_dtype(tensor),
                       reference=ref)
        return Setting(op_extra_tensor=extra)

+ 3
- 1
mindinsight/mindconverter/graph_based_converter/mapper/onnx_to_ms.json View File

@@ -16,5 +16,7 @@
"onnx::MatMul": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.mat_mul_mapper.MatMulMapper",
"onnx::Softmax": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.softmax_mapper.SoftmaxMapper",
"onnx::Reshape": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.reshape_mapper.ReshapeMapper",
"onnx::Slice": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.slice_mapper.SliceMapper"
"onnx::Slice": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.slice_mapper.SliceMapper",
"onnx::Mul": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.mul_mapper.MulMapper",
"onnx::Sigmoid": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.sigmoid_mapper.SigmoidMapper"
}

+ 3
- 6
mindinsight/mindconverter/graph_based_converter/third_party_graph/onnx_graph.py View File

@@ -105,11 +105,9 @@ class OnnxGraph(Graph):
"Graph construct a self-loop node %s. Ignored.", src)
return
if tgt not in self._nodes_collection[src.split(':')[0]].successor_nodes:
self._nodes_collection[src.split(
':')[0]].successor_nodes.append(tgt)
self._nodes_collection[src.split(':')[0]].successor_nodes.append(tgt)
if src not in self._nodes_collection[tgt].precursor_nodes:
self._nodes_collection[tgt.split(
':')[0]].precursor_nodes.append(src)
self._nodes_collection[tgt.split(':')[0]].precursor_nodes.append(src)

def build(self, input_shape=None):
"""
@@ -136,8 +134,7 @@ class OnnxGraph(Graph):
t_name = tensor.name
t_value = tensor.to_array()
node_weight[t_name] = t_value
self._nodes_collection[node_name] = OnnxGraphNode(
node, node_weight)
self._nodes_collection[node_name] = OnnxGraphNode(node, node_weight)
self._nodes_record[node_name] = node_name

for nd_ipt_name in node.precursor_onnx_node_dict:


+ 4
- 0
mindinsight/mindconverter/graph_based_converter/third_party_graph/onnx_graph_node.py View File

@@ -183,6 +183,10 @@ class OnnxGraphNode(GraphNode):
declare, ipt_args_settings_in_construct = self._add_tensor_args_to_code(
'onnx::Add', settings, declare, ipt_args_settings_in_construct, variable_name)

# Extra Tensor generator for onnx::Mul
declare, ipt_args_settings_in_construct = self._add_tensor_args_to_code(
'onnx::Mul', settings, declare, ipt_args_settings_in_construct, variable_name)

call = f"{output_var} = self.{variable_name}({ipt_args_settings_in_construct})"

return declare, call


+ 5
- 11
mindinsight/mindconverter/graph_based_converter/third_party_graph/onnx_utils.py View File

@@ -56,11 +56,6 @@ def convert_tf_graph_to_onnx(model_path, model_inputs, model_outputs, opset=12):

target = ",".join(constants.DEFAULT_TARGET)
shape_override = None
if not 'input' in model_inputs:
error_msg = "The given input node is not an eligible input node."
error = ValueError(error_msg)
log.error(str(error))
raise error

if 'input' in model_outputs:
error_msg = "The given output node is an input node."
@@ -69,12 +64,12 @@ def convert_tf_graph_to_onnx(model_path, model_inputs, model_outputs, opset=12):
raise error

if model_inputs:
model_inputs, shape_override = utils.split_nodename_and_shape(
model_inputs)
model_inputs, shape_override = utils.split_nodename_and_shape(model_inputs)
if model_outputs:
model_outputs = model_outputs.split(',')
graph_def, inputs, outputs = tf_loader.from_graphdef(
model_path, model_inputs, model_outputs)
graph_def, inputs, outputs = tf_loader.from_graphdef(model_path,
model_inputs,
model_outputs)

with tf.Graph().as_default() as tf_graph:
tf.import_graph_def(graph_def, name='')
@@ -88,8 +83,7 @@ def convert_tf_graph_to_onnx(model_path, model_inputs, model_outputs, opset=12):
shape_override=shape_override,
input_names=inputs,
output_names=outputs,
inputs_as_nchw=None
)
inputs_as_nchw=None)
opt_map = getattr(optimizer.back_to_back_optimizer, '_func_map')
if ('Conv', 'BatchNormalization') in opt_map:
opt_map.pop(('Conv', 'BatchNormalization'))


Loading…
Cancel
Save