Browse Source

Add Slice mapper, and fix bugs in TensorAdd and Pad.

tags/v1.1.0
liuchongming 5 years ago
parent
commit
c3b6491545
6 changed files with 79 additions and 16 deletions
  1. +9
    -9
      mindinsight/mindconverter/graph_based_converter/mapper/impl/nn/pad_mapper.py
  2. +43
    -0
      mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/slice_mapper.py
  3. +2
    -1
      mindinsight/mindconverter/graph_based_converter/mapper/onnx_to_ms.json
  4. +20
    -0
      mindinsight/mindconverter/graph_based_converter/sub_graph_searcher/searcher.py
  5. +4
    -0
      mindinsight/mindconverter/graph_based_converter/third_party_graph/onnx_graph_node.py
  6. +1
    -6
      mindinsight/mindconverter/graph_based_converter/third_party_graph/onnx_utils.py

+ 9
- 9
mindinsight/mindconverter/graph_based_converter/mapper/impl/nn/pad_mapper.py View File

@@ -45,15 +45,15 @@ class PadMapper(ONNXToMindSporeMapper):

@staticmethod
def _convert_params(**kwargs):
params = kwargs['params']
weights = kwargs.get("weights")
params = kwargs.get("params")
mode = params.get('mode', 'constant')
pads_onnx = params.get("pads") if params.get("pads") else list(weights.values())[0].tolist()
if mode == 'constant' and params.get('value') is None:
if params.get('pads'):
pads_onnx = params.get('pads')
if params.get('pads') or weights:
if isinstance(pads_onnx, list):
paddings = _padding_format_convert(pads_onnx)
return {'paddings': paddings,
'mode': '\"CONSTANT\"'}
return {'paddings': paddings, 'mode': '\"CONSTANT\"'}
if mode == 'constant':
if params['value'] == 0:
mode = '\"CONSTANT\"'
@@ -65,12 +65,12 @@ class PadMapper(ONNXToMindSporeMapper):
else:
msg = f"{{UNSUPPORTED: \"{mode}\"}}\"UNKNOWN\""
mode = msg
pads_onnx = params['pads']
half_index = len(pads_onnx) // 2
paddings = (
(num_begin, num_end) for num_begin, num_end in zip(pads_onnx[:half_index], pads_onnx[half_index:]))
return {'paddings': tuple(paddings),
'mode': mode}
(num_begin, num_end)
for num_begin, num_end in zip(pads_onnx[:half_index], pads_onnx[half_index:])
)
return {'paddings': tuple(paddings), 'mode': mode}

@staticmethod
def _convert_trained_weights(**kwargs):


+ 43
- 0
mindinsight/mindconverter/graph_based_converter/mapper/impl/ops/slice_mapper.py View File

@@ -0,0 +1,43 @@
# Copyright 2020 Huawei Technologies Co., Ltd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Mapper module."""
from ...base import ONNXToMindSporeMapper
from ...gen_setting import Setting


class SliceMapper(ONNXToMindSporeMapper):
    """Map an ONNX Slice node onto MindSpore's P.Slice operator."""

    @staticmethod
    def _operation_name_in_ms(*args, **kwargs):
        """Return the MindSpore operation name for Slice."""
        return "P.Slice"

    @staticmethod
    def _convert_params(**kwargs):
        """P.Slice takes no constructor parameters; nothing to convert."""
        return {}

    @staticmethod
    def _convert_trained_weights(**kwargs):
        """Slice carries no trainable weights; nothing to convert."""
        return {}

    @staticmethod
    def _convert_settings(**kwargs):
        """Build the extra-input setting (``begin``/``size``) for P.Slice.

        Assumes the node's weight initializers are, in order, the ONNX
        Slice ``starts``, ``ends`` and ``axes`` tensors — TODO confirm
        ``axes`` is always materialized as a weight for converted models.

        Raises:
            ValueError, if the node exposes no weight initializers.
        """
        initializers = list(kwargs.get("weights").values())  # starts, ends, axes
        output_shape = kwargs["params"].get("output_shape")
        if not initializers:
            raise ValueError("Cannot get required params from slice.")
        # Pair each start offset with its axis, then order by axis so the
        # resulting `begin` tuple lines up with the tensor's dimensions.
        by_axis = sorted(
            zip(initializers[0].tolist(), initializers[2].tolist()),
            key=lambda pair: pair[1]
        )
        begin = tuple(start for start, _ in by_axis)
        return Setting(op_extra_input={"begin": begin,
                                       "size": tuple(output_shape)})

+ 2
- 1
mindinsight/mindconverter/graph_based_converter/mapper/onnx_to_ms.json View File

@@ -15,5 +15,6 @@
"onnx::Transpose": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.transpose_mapper.TransposeMapper",
"onnx::MatMul": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.mat_mul_mapper.MatMulMapper",
"onnx::Softmax": "mindinsight.mindconverter.graph_based_converter.mapper.impl.nn.softmax_mapper.SoftmaxMapper",
"onnx::Reshape": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.reshape_mapper.ReshapeMapper"
"onnx::Reshape": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.reshape_mapper.ReshapeMapper",
"onnx::Slice": "mindinsight.mindconverter.graph_based_converter.mapper.impl.ops.slice_mapper.SliceMapper"
}

+ 20
- 0
mindinsight/mindconverter/graph_based_converter/sub_graph_searcher/searcher.py View File

@@ -232,6 +232,23 @@ def flatten_graph(graph):
return [f"Model/{node.op_type}" for _, node in graph.node_collection.items()]


def validate_topo_order_succession():
    """Check that each module's nodes occupy consecutive topological slots.

    Walks ``context.node_collection`` in order. For every node nested at
    least three path levels deep, its module scope is the node name with
    the last two path segments dropped; the scope's nodes must appear at
    strictly consecutive indices in the overall ordering.

    Returns:
        bool, False as soon as one module's nodes are interleaved with
        nodes from outside that module, True otherwise.
    """
    last_seen = {}  # module scope -> index of its most recently seen node
    for position, full_name in enumerate(context.node_collection.keys()):
        segments = full_name.split("/")
        # Shallow nodes (two levels or fewer) belong to no module scope.
        if len(segments) <= 2:
            continue
        scope = "/".join(segments[:-2])
        previous = last_seen.get(scope)
        if previous is not None and previous != position - 1:
            return False
        last_seen[scope] = position
    return True


def generate_scope_name(data_loader):
"""
Generate scope name according to computation graph.
@@ -244,6 +261,9 @@ def generate_scope_name(data_loader):
"""
init_dag = _build_connection(data_loader)
try:
if not validate_topo_order_succession():
raise ValueError("Topological order is not successive.")

result = _sub_graph_matching(init_dag, beam_width=5, sub_graph_size=6)
topo_order_with_scope_name_list = _retrieve_scope_name(result) if result else flatten_graph(init_dag)



+ 4
- 0
mindinsight/mindconverter/graph_based_converter/third_party_graph/onnx_graph_node.py View File

@@ -122,6 +122,10 @@ class OnnxGraphNode(GraphNode):
if not self._op_name == op_name:
return declare, args
if not settings or not settings.op_extra_tensor:
# TensorAdd operation in onnx could add a tensor twice.
ipt_vars = args.split(", ")
if len(ipt_vars) == 1:
args = f"{ipt_vars[0]}, {ipt_vars[0]}"
return declare, args
declare_list = [declare]
declare_t = f"self.{variable_name}_w = Tensor(" \


+ 1
- 6
mindinsight/mindconverter/graph_based_converter/third_party_graph/onnx_utils.py View File

@@ -278,8 +278,6 @@ class OnnxDataLoader:
self.tensors_dict = {} # {tensor_name: OnnxTensor}
self.value_info_dict = {} # Not contains input and output nodes

self.tensor_name_set = set() # [str]
self.node_name_set = set() # [str]
self.node_output_shape_dict = OrderedDict() # {node_name: [int]}

# Key is edge of ONNX ir graph, value is the corresponding precursor node.
@@ -314,8 +312,7 @@ class OnnxDataLoader:
w = int(match.group('w'))
c = int(match.group('c'))
if [h, w, c] != list(self.graph_input_shape)[1:4]:
raise ValueError(
f"Shape given should be (N, {h}, {w}, {c}) but got {self.graph_input_shape}")
raise ValueError(f"Shape given should be (N, {h}, {w}, {c}) but got {self.graph_input_shape}")
return True
return False

@@ -387,7 +384,6 @@ class OnnxDataLoader:
for node in self.nodes:
n = OnnxNode(node)
self._nodes_dict[n.name] = n
self.node_name_set.add(n.name)
if len(node.output) > 1:
raise ModelNotSupport(msg=f"{node.name} has multi-outputs which is not supported now.")
self.output_name_to_node_name[node.output[0]] = node.name
@@ -398,7 +394,6 @@ class OnnxDataLoader:
for tensor in tensors:
t = OnnxTensor(tensor)
self.tensors_dict[t.name] = t
self.tensor_name_set.add(t.name)

def _parse_node_output_shape(self):
"""


Loading…
Cancel
Save