@@ -168,3 +168,17 @@ def get_dict_key_by_value(val, dic):
         if d_val == val:
             return d_key
     return None
+
+
+def convert_bytes_string_to_string(bytes_str):
+    """
+    Convert a bytes object to a str by decoding it as UTF-8.
+
+    Args:
+        bytes_str (bytes): A bytes string.
+
+    Returns:
+        str, the decoded string; non-bytes input is returned unchanged.
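+
+    Examples:
+        >>> convert_bytes_string_to_string(b'same')
+        'same'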
| """ | |||||
| if isinstance(bytes_str, bytes): | |||||
| return bytes_str.decode('utf-8') | |||||
| return bytes_str | |||||
@@ -16,6 +16,7 @@
 import numpy as np
 from ...base import ONNXToMindSporeMapper
 from ...gen_setting import Setting
+from ....common import utils
 
 
 def _convert_padding(**kwargs):
@@ -80,6 +81,10 @@ class ConvMapper(ONNXToMindSporeMapper):
         if weight is None:
             raise ValueError("Conv. Mapper cannot get the weight.")
 
+        auto_pad = None
+        if params.get("auto_pad") is not None:
+            auto_pad = utils.convert_bytes_string_to_string(params.get("auto_pad"))
+
         # tmp tf translated ver. mapping
         if isinstance(params.get('dilations'), list):
             dilation = tuple(params.get('dilations'))
@@ -102,6 +107,10 @@ class ConvMapper(ONNXToMindSporeMapper):
         pad_mode, padding = _convert_padding(params=params)
 
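+        # ONNX auto_pad SAME_UPPER corresponds to MindSpore pad_mode 'same',
+        # under which the explicit padding value is unused.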
+        if auto_pad == "SAME_UPPER":
+            pad_mode = "\'same\'"
+            padding = 0
+
         return {
             'in_channels': in_channels,
             'out_channels': out_channels,
@@ -0,0 +1,76 @@
+# Copyright 2020 Huawei Technologies Co., Ltd. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Mapper module."""
+from ...base import ONNXToMindSporeMapper
+from ...gen_setting import Setting
+from ....common import utils
+
+
+class ResizeMapper(ONNXToMindSporeMapper):
+    """Resize mapper."""
+
+    @staticmethod
+    def _operation_name_in_ms(*args, **kwargs):
+        params = kwargs.get("params")
+        onnx_coordinate_transform = params.get("coordinate_transformation_mode")
+        if onnx_coordinate_transform is not None:
+            onnx_coordinate_transform = utils.convert_bytes_string_to_string(onnx_coordinate_transform)
+        interpolation_mode = params.get("mode")
+        if interpolation_mode is not None:
+            interpolation_mode = utils.convert_bytes_string_to_string(interpolation_mode)
+
+        # Decide which MindSpore resize operator to use.
+        if interpolation_mode == "linear":
+            return "P.ResizeBilinear"
+        if interpolation_mode == "nearest":
+            return "P.ResizeNearestNeighbor"
+
+        # For any undefined mode, fall back to bilinear.
+        return "P.ResizeBilinear"
+
+    @staticmethod
+    def _convert_params(**kwargs):
+        weights = kwargs.get("weights")
+        params = kwargs.get("params")
+
+        # Set default params.
+        align_corners = False
+
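+        # An ONNX Resize node carries at most three constant inputs besides
+        # the data itself: roi, scales and sizes.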
+        if len(weights) > 3:
+            raise ValueError("For resize, the length of `weights` should be no more than 3.")
+
+        onnx_coordinate_transform = params.get("coordinate_transformation_mode")
+        if onnx_coordinate_transform is not None:
+            onnx_coordinate_transform = utils.convert_bytes_string_to_string(onnx_coordinate_transform)
+            if onnx_coordinate_transform == "align_corners" or "half_pixel" in onnx_coordinate_transform:
+                align_corners = True
+
+        # The requested size is stored in the last constant input; its last two values are the spatial size.
+        size = list(weights.values())[-1][-2:].tolist()
+        return {"size": tuple(size),
+                "align_corners": align_corners}
+
+    @staticmethod
+    def _convert_trained_weights(**kwargs):
+        return dict()
+
+    @staticmethod
+    def _convert_settings(**kwargs):
+        return Setting()
@@ -19,5 +19,6 @@
   "onnx::Slice": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.slice_mapper.SliceMapper",
   "onnx::Mul": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.mul_mapper.MulMapper",
   "onnx::Sigmoid": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.sigmoid_mapper.SigmoidMapper",
-  "onnx::Split": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.split_mapper.SplitMapper"
+  "onnx::Split": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.split_mapper.SplitMapper",
+  "onnx::Resize": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.resize_mapper.ResizeMapper"
 }
@@ -131,6 +131,12 @@ class ReportGenerator(metaclass=abc.ABCMeta):
         for num_line in range(0, num_all_lines):
             code_line = code_lines[num_line]
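+            # The converted ResizeNearestNeighbor may not match the original ONNX Resize
+            # exactly, so emit a warning that points at the operator's line and column.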
+            if 'P.ResizeNearestNeighbor' in code_line:
+                warning_msg = f"[WARNING] {num_line + 1}:{code_line.index('P.ResizeNearestNeighbor') + 1} " \
+                              "The operator ResizeNearestNeighbor may not be converted accurately. " \
+                              "Please check its parameters against your original model and the MindSpore documentation."
+                self._content = NEW_LINE.join((self._content, warning_msg))
+
             if 'onnx.' in code_line:
                 num_unconverted_operator += 1
                 unconverted_operator = SEPARATOR_IN_ONNX_OP.join(
@@ -278,6 +278,9 @@ class OnnxDataLoader:
         # Key is edge of ONNX ir graph, value is the corresponding precursor node.
         self.output_name_to_node_name = dict()
 
+        # Dynamic resize nodes whose target sizes must be evaluated with onnxruntime.
+        self.dynamic_resize_node = list()
+
         self.dynamic_reshape_node = list()
         self.eliminated_nodes = list()
@@ -499,12 +502,28 @@ class OnnxDataLoader:
             for opt_tensor_name, value in fetch_dict.items():
                 self.tensors_dict[opt_tensor_name] = OnnxTensor(value, opt_tensor_name)
 
+        def _for_resize():
+            """Evaluate the target size tensors of dynamic Resize nodes."""
+            nonlocal self
+            if not self.dynamic_resize_node:
+                return
+            output_tensors = []
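+            # The fourth input of an ONNX Resize node refers to its target size tensor.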
+            for node in self.dynamic_resize_node:
+                shape_ref = self._nodes_dict[node].input_name_list[3]
+                output_tensors.append(shape_ref)
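+            # Run the model once on random input so onnxruntime yields concrete size values.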
+            feed_dict = {self.input_nodes[0]: np.random.rand(*self.graph_input_shape).astype(np.float32)}
+            fetch_dict = fetch_output_from_onnx_model(self.model, feed_dict=feed_dict, output_nodes=output_tensors)
+            for opt_tensor_name, value in fetch_dict.items():
+                self.tensors_dict[opt_tensor_name] = OnnxTensor(value, opt_tensor_name)
+
         _for_reshape()
+        _for_resize()
 
     def _find_nodes_to_be_eliminated(self):
         """Call all PASS to optimize graph."""
         for nd_name, nd_inst in self._nodes_dict.items():
             self._pass_of_shape(nd_name, nd_inst)
+            self._pass_of_resize(nd_name, nd_inst)
 
     def _pass_of_shape(self, nd_name, nd_inst):
         """Create a PASS to optimize shape and reshape operations in ONNX ir graph."""
@@ -533,3 +552,29 @@ class OnnxDataLoader:
             eliminated_nodes = _traceback_precursor_nodes_until_shape_op(to_shape)
             self.dynamic_reshape_node.append(nd_name)
             self.eliminated_nodes += eliminated_nodes
+
+    def _pass_of_resize(self, nd_name, nd_inst):
+        """Create a PASS to optimize resize operations in the ONNX ir graph."""
+        to_be_eliminated_op = {"Concat", "Cast", "Mul", "Slice", "Gather", "Shape"}
+
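+        # Recursively collect the shape-computation subgraph feeding the size input;
+        # those nodes can be eliminated once the size is evaluated to a constant.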
+        def _traceback_precursor_nodes_until_shape_op(node_ref):
+            nonlocal self
+            e_nodes = []
+            node = self._nodes_dict[self.output_name_to_node_name[node_ref]]
+            if node.op_type not in to_be_eliminated_op:
+                return e_nodes
+            e_nodes.append(node.name)
+            for ipt in node.input_name_list:
+                if ipt not in self.tensors_dict:
+                    e_nodes += _traceback_precursor_nodes_until_shape_op(ipt)
+            return e_nodes
+
+        if nd_inst.op_type == "Resize":
+            # The fourth input holds the target size; a constant size needs no extra pass.
+            to_shape = nd_inst.input_name_list[3]
+            if to_shape in self.tensors_dict:
+                return
+            eliminated_nodes = _traceback_precursor_nodes_until_shape_op(to_shape)
+            self.dynamic_resize_node.append(nd_name)
+            self.eliminated_nodes += eliminated_nodes