
!6347 support not in and add check for grad_with_sens with no sense provided

Merge pull request !6347 from zhangbuxue/support_not_in_and_add_check_for_grad_with_sens_with_no_sense_provided
tags/v1.0.0
mindspore-ci-bot committed 5 years ago
commit fd7bcd045a
22 changed files with 187 additions and 26 deletions

  1. +1   -1  mindspore/_extends/parse/resources.py
  2. +1   -1  mindspore/ccsrc/frontend/operator/composite/composite.cc
  3. +7   -4  mindspore/ccsrc/pipeline/jit/static_analysis/prim.cc
  4. +2   -3  mindspore/nn/layer/normalization.py
  5. +3   -1  mindspore/ops/composite/multitype_ops/__init__.py
  6. +1   -1  mindspore/ops/composite/multitype_ops/add_impl.py
  7. +1   -1  mindspore/ops/composite/multitype_ops/div_impl.py
  8. +1   -1  mindspore/ops/composite/multitype_ops/equal_impl.py
  9. +1   -1  mindspore/ops/composite/multitype_ops/getitem_impl.py
  10. +1   -1  mindspore/ops/composite/multitype_ops/greater_equal_impl.py
  11. +1   -1  mindspore/ops/composite/multitype_ops/greater_impl.py
  12. +2   -2  mindspore/ops/composite/multitype_ops/in_impl.py
  13. +1   -1  mindspore/ops/composite/multitype_ops/less_equal_impl.py
  14. +1   -1  mindspore/ops/composite/multitype_ops/less_impl.py
  15. +1   -1  mindspore/ops/composite/multitype_ops/logic_not_impl.py
  16. +1   -1  mindspore/ops/composite/multitype_ops/logical_and_impl.py
  17. +1   -1  mindspore/ops/composite/multitype_ops/logical_or_impl.py
  18. +1   -1  mindspore/ops/composite/multitype_ops/not_equal_impl.py
  19. +101 -0  mindspore/ops/composite/multitype_ops/not_in_impl.py
  20. +1   -1  mindspore/ops/composite/multitype_ops/setitem_impl.py
  21. +1   -1  mindspore/ops/composite/multitype_ops/uadd_impl.py
  22. +56  -0  tests/ut/python/pipeline/infer/test_not_in.py

mindspore/_extends/parse/resources.py (+1 -1)

@@ -107,7 +107,7 @@ convert_object_map = {
     T.is_: F.is_,
     T.is_not: F.is_not,
     T.contains: multitype_ops.in_,
-    T.not_contains: F.not_in_dict,
+    T.not_contains: multitype_ops.not_in_,

     # system function
     T.len: M.ms_len,
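Routing `T.not_contains` to the polymorphic `multitype_ops.not_in_` (instead of the dict-only `F.not_in_dict`) lets a `not in` expression in graph mode work on tuples and lists as well as dictionaries. A minimal sketch of the user-facing effect, assuming constant operands in GRAPH_MODE (the class name `NotInNet` is illustrative):

    import mindspore.nn as nn
    from mindspore import context

    context.set_context(mode=context.GRAPH_MODE)

    class NotInNet(nn.Cell):
        def __init__(self):
            super(NotInNet, self).__init__()
            self.values = (2, 3, 4)  # compile-time constant sequence

        def construct(self):
            # `not in` now lowers to multitype_ops.not_in_ and is
            # folded during graph compilation
            return 5 not in self.values

    net = NotInNet()
    assert net()  # 5 is not in (2, 3, 4)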


mindspore/ccsrc/frontend/operator/composite/composite.cc (+1 -1)

@@ -641,7 +641,7 @@ void GradOperation::doGetGrad(const FuncGraphPtr &func_graph, AnfNodePtr out, An

 // Generate the graph.
 FuncGraphPtr GradOperation::GenerateFuncGraph(const AbstractBasePtrList &args_spec_list) {
-  if (args_spec_list.size() < 1) {
+  if (args_spec_list.empty()) {
     MS_LOG(EXCEPTION) << "GenerateGraph requires at least 1 parameters, while the input size is "
                       << args_spec_list.size() << ".";
   }


mindspore/ccsrc/pipeline/jit/static_analysis/prim.cc (+7 -4)

@@ -138,7 +138,7 @@ EvalResultPtr UnpackGraphEvaluator::Run(AnalysisEnginePtr engine, const ConfigPt
   auto unpack_graph = prim_->cast<prim::UnpackGraphPrimitivePtr>();
   auto out_node = out_conf->node()->cast<CNodePtr>();
   const auto &out_node_inputs = out_node->inputs();
-  if (out_node->inputs().size() == 0 || (out_node_inputs.size() - 1) != args_conf_list.size()) {
+  if (out_node->inputs().empty() || (out_node_inputs.size() - 1) != args_conf_list.size()) {
     MS_LOG(EXCEPTION) << "UnpackGraphPrimitive"
                       << " args size should equal to inputs size minus 1, but args size " << args_conf_list.size()
                       << ", inputs size " << out_node_inputs.size();
@@ -149,7 +149,7 @@ EvalResultPtr UnpackGraphEvaluator::Run(AnalysisEnginePtr engine, const ConfigPt
                        [](const ConfigPtr &ref) -> AbstractBasePtr { return ref->GetEvaluatedValue()->abstract(); });
   // get the forward graph
   MS_EXCEPTION_IF_NULL(args_spec_list[0]);
-  AbstractFunctionPtr fn = args_spec_list[0]->cast<AbstractFunctionPtr>();
+  auto fn = args_spec_list[0]->cast<AbstractFunctionPtr>();
   if (fn == nullptr) {
     MS_LOG(EXCEPTION) << "UnpackGraphPrimitive arg0 must be AbstractFunction, but " << args_spec_list[0]->ToString();
   }
@@ -161,6 +161,9 @@ EvalResultPtr UnpackGraphEvaluator::Run(AnalysisEnginePtr engine, const ConfigPt
       GetUnpackGraphSpecArgsList(args_spec_list, unpack_graph->need_unpack_args());

   AbstractBasePtrList graph_specialize_args_without_sens;
+  if (unpack_graph->with_sens_in_args() && graph_specialize_args.empty()) {
+    MS_EXCEPTION(ValueError) << "Grad with sens, but the sens is not provided.";
+  }
   (void)std::transform(graph_specialize_args.begin(),
                        graph_specialize_args.end() - (unpack_graph->with_sens_in_args() ? 1 : 0),
                        std::back_inserter(graph_specialize_args_without_sens), [](AbstractBasePtr abs) { return abs; });
@@ -177,8 +180,8 @@ EvalResultPtr UnpackGraphEvaluator::Run(AnalysisEnginePtr engine, const ConfigPt
   return engine->ForwardConfig(out_conf, fn_conf);
 }

-AnfNodePtr MixedPrecisionCastHelper(AnfNodePtr source_node, AbstractBasePtr node_type, AnfNodePtr target_type,
-                                    FuncGraphPtr func_graph) {
+AnfNodePtr MixedPrecisionCastHelper(const AnfNodePtr &source_node, const AbstractBasePtr &node_type,
+                                    const AnfNodePtr &target_type, const FuncGraphPtr &func_graph) {
   AnfNodePtr target_node = source_node;
   if (node_type->isa<AbstractTensor>()) {
     auto x = node_type->cast<AbstractTensorPtr>();
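The added guard turns a previously obscure failure into an explicit error: compiling a `GradOperation` that expects a sensitivity input (`sens_param=True`) without actually passing one now raises `ValueError: Grad with sens, but the sens is not provided.` A hedged sketch of the triggering pattern (`GradWithSens` and `net` are illustrative names):

    import mindspore.nn as nn
    from mindspore import context
    from mindspore.ops import composite as C

    context.set_context(mode=context.GRAPH_MODE)

    class GradWithSens(nn.Cell):
        """Illustrative wrapper: grad of `net` with an explicit sensitivity input."""
        def __init__(self, net):
            super(GradWithSens, self).__init__()
            self.net = net
            self.grad = C.GradOperation(sens_param=True)

        def construct(self, x, sens):
            # ok: the sensitivity tensor is threaded through to the grad call
            return self.grad(self.net)(x, sens)

    # If construct were written as `return self.grad(self.net)(x)` while
    # sens_param=True, compilation now fails with:
    #   ValueError: Grad with sens, but the sens is not provided.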


mindspore/nn/layer/normalization.py (+2 -3)

@@ -27,12 +27,12 @@ from mindspore._checkparam import check_int_positive
 from mindspore.ops import _selected_ops
 from ..cell import Cell


 __all__ = ['BatchNorm1d', 'BatchNorm2d', 'LayerNorm', 'GroupNorm', 'GlobalBatchNorm']

+
 class _BatchNorm(Cell):
     """Batch Normalization base class."""
-
     @cell_attr_register
     def __init__(self,
                  num_features,
@@ -132,7 +132,6 @@ class _BatchNorm(Cell):
         group_list = [list(i) for i in world_rank_list]
         return group_list

-
     def _global_sync(self, x, axes, re_shape):
         """calculate global batch normalization output"""
         x_mean = self.reduce_mean(x, axes)


mindspore/ops/composite/multitype_ops/__init__.py (+3 -1)

@@ -38,6 +38,7 @@ from .logical_or_impl import logical_or
 from .logic_not_impl import logical_not
 from .uadd_impl import uadd
 from .in_impl import in_
+from .not_in_impl import not_in_
 __all__ = [
     'add',
     'sub',
@@ -61,5 +62,6 @@ __all__ = [
     'logical_and',
     'logical_or',
     'logical_not',
-    'in_'
+    'in_',
+    'not_in_'
 ]
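With the import and `__all__` entry in place, the new dispatcher is reachable the same way `in_` is; a quick sanity check (assuming the package layout above):

    from mindspore.ops.composite import multitype_ops

    assert 'in_' in multitype_ops.__all__
    assert 'not_in_' in multitype_ops.__all__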

mindspore/ops/composite/multitype_ops/add_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""add_impl"""
+"""Implementation for internal polymorphism `add` operations."""

 from ...composite import base
 from ... import functional as F


mindspore/ops/composite/multitype_ops/div_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""div_impl"""
+"""Implementation for internal polymorphism `div` operations."""

 from ...composite import base
 from ... import functional as F


mindspore/ops/composite/multitype_ops/equal_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""equal_impl"""
+"""Implementation for internal polymorphism `equal` operations."""
 from . import _constexpr_utils as const_utils
 from ...composite import base
 from ... import functional as F


mindspore/ops/composite/multitype_ops/getitem_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""Implementation for getitem."""
+"""Implementation for internal polymorphism `getitem` operations."""
 from . import _compile_utils as compile_utils
 from .. import base
 from ... import functional as F


mindspore/ops/composite/multitype_ops/greater_equal_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""greater_equal_impl"""
+"""Implementation for internal polymorphism `greater_equal` operations."""
 from mindspore.ops.composite import base
 from mindspore.ops import functional as F




mindspore/ops/composite/multitype_ops/greater_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""equal_impl"""
+"""Implementation for internal polymorphism `greater` operations."""
 from mindspore.ops.composite import base
 from mindspore.ops import functional as F




mindspore/ops/composite/multitype_ops/in_impl.py (+2 -2)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""in_impl"""
+"""Implementation for internal polymorphism `in` operations."""

 from . import _constexpr_utils as const_utils
 from ... import functional as F
@@ -21,7 +21,7 @@ from ...composite import base

 in_ = base.MultitypeFuncGraph("in", True)
 """
-in_ is a metafuncgraph object which will determine if a in b
+"in_" is a multi type func graph object which will determine if a in b
 using ".register" decorator
 """




mindspore/ops/composite/multitype_ops/less_equal_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""less_equal_impl"""
+"""Implementation for internal polymorphism `less_equal` operations."""
 from mindspore.ops.composite import base
 from mindspore.ops import functional as F




mindspore/ops/composite/multitype_ops/less_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""equal_impl"""
+"""Implementation for internal polymorphism `less` operations."""
 from mindspore.ops.composite import base
 from mindspore.ops import functional as F




mindspore/ops/composite/multitype_ops/logic_not_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""logical_not_impl"""
+"""Implementation for internal polymorphism `logical not` operations."""
 from mindspore.ops.composite import base
 from mindspore.ops import functional as F




mindspore/ops/composite/multitype_ops/logical_and_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""logical_and_impl"""
+"""Implementation for internal polymorphism `logical and` operations."""
 from mindspore.ops.composite import base
 from mindspore.ops import functional as F




mindspore/ops/composite/multitype_ops/logical_or_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""logical_or_impl"""
+"""Implementation for internal polymorphism `logical or` operations."""
 from mindspore.ops.composite import base
 from mindspore.ops import functional as F




mindspore/ops/composite/multitype_ops/not_equal_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""not_equal_impl"""
+"""Implementation for internal polymorphism `not equal` operations."""

 from ...composite import base
 from ... import functional as F


mindspore/ops/composite/multitype_ops/not_in_impl.py (new file, +101 -0)

@@ -0,0 +1,101 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

"""Implementation for internal polymorphism `not in` operations."""

from . import _constexpr_utils as const_utils
from ... import functional as F
from ...composite import base

not_in_ = base.MultitypeFuncGraph("not_in", True)
"""
"not_in_" is a multi type func graph object which will determine if a not in b.
using ".register" decorator
"""


@not_in_.register("Number", "Tuple")
def _number_not_in_tuple(x, y):
    """
    Determine if a number not in tuple.

    Args:
        x (Number): x
        y (tuple): y

    Returns:
        bool, if x not in y return true, x in y return false.
    """
    return not const_utils.scalar_in_sequence(x, y)


@not_in_.register("Number", "List")
def _number_not_in_list(x, y):
    """
    Determine if a number not in list.

    Args:
        x (Number): x
        y (list): y

    Returns:
        bool, if x not in y return true, x in y return false.
    """
    return not const_utils.scalar_in_sequence(x, y)


@not_in_.register("String", "Tuple")
def _string_not_in_tuple(x, y):
    """
    Determine if a str not in a tuple.

    Args:
        x (str): x
        y (tuple): y

    Returns:
        bool, if x not in y return true, x in y return false.
    """
    return not const_utils.scalar_in_sequence(x, y)


@not_in_.register("String", "List")
def _string_not_in_list(x, y):
    """
    Determine if a str not in a list.

    Args:
        x (str): x
        y (list): y

    Returns:
        bool, if x not in y return true, x in y return false.
    """
    return not const_utils.scalar_in_sequence(x, y)


@not_in_.register("String", "Dictionary")
def _str_not_in_dict(x, y):
    """
    Determine if a str not in dict.

    Args:
        x: str
        y: dict

    Returns:
        bool, if x not in y return true, x in y return false.
    """
    return F.not_in_dict(x, y)
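Each `a not in b` expression in graph mode lowers to `not_in_(a, b)`, and the overload is selected from the inferred types of the operands. A rough mapping for the overloads registered above (illustrative, assuming constant operands):

    # 5 not in (2, 3, 4)      -> _number_not_in_tuple   ("Number", "Tuple")
    # 5 not in [2, 3, 4]      -> _number_not_in_list    ("Number", "List")
    # "a" not in ("a", "b")   -> _string_not_in_tuple   ("String", "Tuple")
    # "a" not in ["a", "b"]   -> _string_not_in_list    ("String", "List")
    # "a" not in {"a": 1}     -> _str_not_in_dict       ("String", "Dictionary")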

mindspore/ops/composite/multitype_ops/setitem_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""Implementation for setitem."""
+"""Implementation for internal polymorphism `setitem` operations."""

 from . import _compile_utils as compile_utils
 from ... import functional as F


mindspore/ops/composite/multitype_ops/uadd_impl.py (+1 -1)

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================

-"""uadd_impl"""
+"""Implementation for internal polymorphism `uadd` operations."""
 from mindspore.ops.composite import base

 # uadd is a metagraph object which will return operation result regarding input


tests/ut/python/pipeline/infer/test_not_in.py (new file, +56 -0)

@@ -0,0 +1,56 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test not in"""
import numpy as np

import mindspore.nn as nn
from mindspore import context, Tensor

context.set_context(mode=context.GRAPH_MODE)


def test_number_not_in_tuple():
    class Net(nn.Cell):
        def __init__(self):
            super(Net, self).__init__()
            self.tuple_ = (2, 3, 4)
            self.list_ = [2, 3, 4]
            self.dict_ = {"a": Tensor(np.ones([1, 2, 3], np.int32)),
                          "b": Tensor(np.ones([1, 2, 3], np.int32)),
                          "c": Tensor(np.ones([1, 2, 3], np.int32))}
            self.number_in = 3
            self.number_not_in = 5
            self.str_in = "a"
            self.str_not_in = "e"

        def construct(self):
            ret = 0
            if self.number_in not in self.tuple_:
                ret += 1
            if self.number_not_in not in self.tuple_:
                ret += 1
            if self.number_in not in self.list_:
                ret += 3
            if self.number_not_in not in self.list_:
                ret += 3
            if self.str_in not in self.dict_:
                ret += 5
            if self.str_not_in not in self.dict_:
                ret += 5
            return ret

    net = Net()
    output = net()
    assert output == 9
