| @@ -13,9 +13,12 @@ | |||
| # limitations under the License. | |||
| # ============================================================================ | |||
| """Define common utils.""" | |||
| import os | |||
| import stat | |||
| from importlib import import_module | |||
| from typing import List | |||
| from typing import List, Tuple, Mapping | |||
| from mindinsight.mindconverter.common.log import logger as log | |||
| from mindinsight.mindconverter.graph_based_converter.constant import SEPARATOR_IN_ONNX_OP | |||
| @@ -63,3 +66,46 @@ def fetch_output_from_onnx_model(model, feed_dict: dict, output_nodes: List[str] | |||
| for idx, opt in enumerate(output_nodes): | |||
| run_result[opt] = fetched_res[idx] | |||
| return run_result | |||
def save_code_file_and_report(model_name: str, code_lines: Mapping[str, Tuple],
                              out_folder: str, report_folder: str):
    """
    Save generated source code and its conversion report to disk.

    Args:
        model_name (str): Model name; used to build both output file names.
        code_lines (dict): Mapping of file name to a ``(code, report)`` tuple.
        out_folder (str): Folder that receives the generated ``.py`` file.
        report_folder (str): Folder that receives the report file; falls back
            to ``out_folder`` when empty.
    """
    # O_EXCL makes creation fail if the target file already exists.
    open_flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
    file_mode = stat.S_IRUSR | stat.S_IWUSR
    dir_mode = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR

    out_folder = os.path.realpath(out_folder)
    report_folder = os.path.realpath(report_folder) if report_folder else out_folder

    for folder in (out_folder, report_folder):
        if not os.path.exists(folder):
            os.makedirs(folder, dir_mode)

    code_path = os.path.realpath(os.path.join(out_folder, f"{model_name}.py"))
    report_path = os.path.realpath(
        os.path.join(report_folder, f"report_of_{model_name}.txt"))

    # NOTE(review): the loop key is unused -- every entry is written to the same
    # model_name-based paths, and O_EXCL would raise on a second entry. This
    # assumes code_lines always holds a single entry; confirm against callers.
    for _, (code, report) in code_lines.items():
        try:
            with os.fdopen(os.open(code_path, open_flags, file_mode), 'w') as code_file:
                code_file.write(code)
            # Report is created read-only for the owner.
            with os.fdopen(os.open(report_path, open_flags, stat.S_IRUSR), "w") as report_file:
                report_file.write(report)
        except IOError as error:
            log.error(str(error))
            log.exception(error)
            raise error
| @@ -25,7 +25,7 @@ from treelib import Tree, Node | |||
| from mindinsight.mindconverter.common.log import logger as log | |||
| from .name_mgr import ModuleNameMgr, GlobalVarNameMgr | |||
| from ..common.utils import is_converted | |||
| from ..common.utils import is_converted, save_code_file_and_report | |||
| from ..mapper.base import Mapper | |||
| from ..third_party_graph.pytorch_graph_node import PyTorchGraphNode | |||
| from ..third_party_graph.onnx_graph_node import OnnxGraphNode | |||
| @@ -193,37 +193,7 @@ class HierarchicalTree(Tree): | |||
| log.error("Error occur when generating codes.") | |||
| raise e | |||
| out_folder = os.path.realpath(out_folder) | |||
| if not report_folder: | |||
| report_folder = out_folder | |||
| else: | |||
| report_folder = os.path.realpath(report_folder) | |||
| if not os.path.exists(out_folder): | |||
| os.makedirs(out_folder, self.modes_usr) | |||
| if not os.path.exists(report_folder): | |||
| os.makedirs(report_folder, self.modes_usr) | |||
| for file_name in code_fragments: | |||
| code, report = code_fragments[file_name] | |||
| try: | |||
| with os.fdopen(os.open(os.path.realpath(os.path.join(out_folder, f"{model_name}.py")), | |||
| self.flags, self.modes), 'w') as file: | |||
| file.write(code) | |||
| except IOError as error: | |||
| log.error(str(error)) | |||
| log.exception(error) | |||
| raise error | |||
| try: | |||
| with os.fdopen(os.open(os.path.realpath(os.path.join(report_folder, | |||
| f"report_of_{model_name}.txt")), | |||
| self.flags, stat.S_IRUSR), "w") as rpt_f: | |||
| rpt_f.write(report) | |||
| except IOError as error: | |||
| log.error(str(error)) | |||
| log.exception(error) | |||
| raise error | |||
| save_code_file_and_report(model_name, code_fragments, out_folder, report_folder) | |||
| def _preprocess_node_args(self, node, module_key): | |||
| """ | |||
| @@ -0,0 +1,37 @@ | |||
| # Copyright 2020 Huawei Technologies Co., Ltd. All Rights Reserved. | |||
| # | |||
| # Licensed under the Apache License, Version 2.0 (the "License"); | |||
| # you may not use this file except in compliance with the License. | |||
| # You may obtain a copy of the License at | |||
| # | |||
| # http://www.apache.org/licenses/LICENSE-2.0 | |||
| # | |||
| # Unless required by applicable law or agreed to in writing, software | |||
| # distributed under the License is distributed on an "AS IS" BASIS, | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| # ============================================================================== | |||
| """Mapper module.""" | |||
| from ...base import ONNXToMindSporeMapper | |||
| from ...gen_setting import Setting | |||
class SigmoidMapper(ONNXToMindSporeMapper):
    """Mapper from ONNX Sigmoid to the MindSpore nn.Sigmoid layer."""

    @staticmethod
    def _operation_name_in_ms(*args, **kwargs):
        # Sigmoid has a direct nn-layer counterpart in MindSpore.
        return "nn.Sigmoid"

    @staticmethod
    def _convert_params(**kwargs):
        # nn.Sigmoid takes no constructor arguments.
        return {}

    @staticmethod
    def _convert_trained_weights(**kwargs):
        # Sigmoid is parameter-free; nothing to carry over.
        return {}

    @staticmethod
    def _convert_settings(**kwargs):
        # No extra code-generation settings are required.
        return Setting()
| @@ -0,0 +1,42 @@ | |||
| # Copyright 2020 Huawei Technologies Co., Ltd. All Rights Reserved. | |||
| # | |||
| # Licensed under the Apache License, Version 2.0 (the "License"); | |||
| # you may not use this file except in compliance with the License. | |||
| # You may obtain a copy of the License at | |||
| # | |||
| # http://www.apache.org/licenses/LICENSE-2.0 | |||
| # | |||
| # Unless required by applicable law or agreed to in writing, software | |||
| # distributed under the License is distributed on an "AS IS" BASIS, | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| # ============================================================================== | |||
| """Mapper module.""" | |||
| from ...base import ONNXToMindSporeMapper | |||
| from ...gen_setting import Setting, Tensor, get_dtype | |||
class MulMapper(ONNXToMindSporeMapper):
    """Mapper from ONNX Mul to the MindSpore P.Mul operator."""

    @staticmethod
    def _operation_name_in_ms(*args, **kwargs):
        return "P.Mul"

    @staticmethod
    def _convert_params(**kwargs):
        # P.Mul takes no constructor arguments.
        return {}

    @staticmethod
    def _convert_trained_weights(**kwargs):
        # Mul carries no trainable weights of its own.
        return {}

    @staticmethod
    def _convert_settings(**kwargs):
        # When one operand is a constant tensor from the ONNX graph, expose it
        # as an extra tensor in the generation settings.
        weights = kwargs.get("weights")
        if not weights:
            return Setting()
        ref, tensor = next(iter(weights.items()))
        extra = Tensor(shape=tensor.shape, dtype=get_dtype(tensor), reference=ref)
        return Setting(op_extra_tensor=extra)
| @@ -16,5 +16,7 @@ | |||
| "onnx::MatMul": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.mat_mul_mapper.MatMulMapper", | |||
| "onnx::Softmax": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.softmax_mapper.SoftmaxMapper", | |||
| "onnx::Reshape": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.reshape_mapper.ReshapeMapper", | |||
| "onnx::Slice": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.slice_mapper.SliceMapper" | |||
| "onnx::Slice": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.slice_mapper.SliceMapper", | |||
| "onnx::Mul": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.mul_mapper.MulMapper", | |||
| "onnx::Sigmoid": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.sigmoid_mapper.SigmoidMapper" | |||
| } | |||
| @@ -105,11 +105,9 @@ class OnnxGraph(Graph): | |||
| "Graph construct a self-loop node %s. Ignored.", src) | |||
| return | |||
| if tgt not in self._nodes_collection[src.split(':')[0]].successor_nodes: | |||
| self._nodes_collection[src.split( | |||
| ':')[0]].successor_nodes.append(tgt) | |||
| self._nodes_collection[src.split(':')[0]].successor_nodes.append(tgt) | |||
| if src not in self._nodes_collection[tgt].precursor_nodes: | |||
| self._nodes_collection[tgt.split( | |||
| ':')[0]].precursor_nodes.append(src) | |||
| self._nodes_collection[tgt.split(':')[0]].precursor_nodes.append(src) | |||
| def build(self, input_shape=None): | |||
| """ | |||
| @@ -136,8 +134,7 @@ class OnnxGraph(Graph): | |||
| t_name = tensor.name | |||
| t_value = tensor.to_array() | |||
| node_weight[t_name] = t_value | |||
| self._nodes_collection[node_name] = OnnxGraphNode( | |||
| node, node_weight) | |||
| self._nodes_collection[node_name] = OnnxGraphNode(node, node_weight) | |||
| self._nodes_record[node_name] = node_name | |||
| for nd_ipt_name in node.precursor_onnx_node_dict: | |||
| @@ -183,6 +183,10 @@ class OnnxGraphNode(GraphNode): | |||
| declare, ipt_args_settings_in_construct = self._add_tensor_args_to_code( | |||
| 'onnx::Add', settings, declare, ipt_args_settings_in_construct, variable_name) | |||
| # Extra Tensor generator for onnx::Mul | |||
| declare, ipt_args_settings_in_construct = self._add_tensor_args_to_code( | |||
| 'onnx::Mul', settings, declare, ipt_args_settings_in_construct, variable_name) | |||
| call = f"{output_var} = self.{variable_name}({ipt_args_settings_in_construct})" | |||
| return declare, call | |||
| @@ -56,11 +56,6 @@ def convert_tf_graph_to_onnx(model_path, model_inputs, model_outputs, opset=12): | |||
| target = ",".join(constants.DEFAULT_TARGET) | |||
| shape_override = None | |||
| if not 'input' in model_inputs: | |||
| error_msg = "The given input node is not an eligible input node." | |||
| error = ValueError(error_msg) | |||
| log.error(str(error)) | |||
| raise error | |||
| if 'input' in model_outputs: | |||
| error_msg = "The given output node is an input node." | |||
| @@ -69,12 +64,12 @@ def convert_tf_graph_to_onnx(model_path, model_inputs, model_outputs, opset=12): | |||
| raise error | |||
| if model_inputs: | |||
| model_inputs, shape_override = utils.split_nodename_and_shape( | |||
| model_inputs) | |||
| model_inputs, shape_override = utils.split_nodename_and_shape(model_inputs) | |||
| if model_outputs: | |||
| model_outputs = model_outputs.split(',') | |||
| graph_def, inputs, outputs = tf_loader.from_graphdef( | |||
| model_path, model_inputs, model_outputs) | |||
| graph_def, inputs, outputs = tf_loader.from_graphdef(model_path, | |||
| model_inputs, | |||
| model_outputs) | |||
| with tf.Graph().as_default() as tf_graph: | |||
| tf.import_graph_def(graph_def, name='') | |||
| @@ -88,8 +83,7 @@ def convert_tf_graph_to_onnx(model_path, model_inputs, model_outputs, opset=12): | |||
| shape_override=shape_override, | |||
| input_names=inputs, | |||
| output_names=outputs, | |||
| inputs_as_nchw=None | |||
| ) | |||
| inputs_as_nchw=None) | |||
| opt_map = getattr(optimizer.back_to_back_optimizer, '_func_map') | |||
| if ('Conv', 'BatchNormalization') in opt_map: | |||
| opt_map.pop(('Conv', 'BatchNormalization')) | |||