diff --git a/mindspore/ccsrc/backend/common/pass/common_subexpression_elimination.cc b/mindspore/ccsrc/backend/common/pass/common_subexpression_elimination.cc index 17055e5ec2..f692a20aa4 100644 --- a/mindspore/ccsrc/backend/common/pass/common_subexpression_elimination.cc +++ b/mindspore/ccsrc/backend/common/pass/common_subexpression_elimination.cc @@ -111,7 +111,7 @@ bool BackendCSE::CheckCNode(const CNodePtr &main, const CNodePtr &node) const { if (!context_ptr->get_param<bool>(MS_CTX_ENABLE_LOOP_SINK) && CheckIgnoreCase(main)) { return false; } - if (HasRandomEffect(main) || HasRandomEffect(node)) { + if (HasHiddenSideEffect(main) || HasHiddenSideEffect(node)) { return false; } if (!CheckEqualKernelBuildInfo(main, node)) { diff --git a/mindspore/ccsrc/frontend/optimizer/cse.cc b/mindspore/ccsrc/frontend/optimizer/cse.cc index dd35aca786..f2aff457ce 100644 --- a/mindspore/ccsrc/frontend/optimizer/cse.cc +++ b/mindspore/ccsrc/frontend/optimizer/cse.cc @@ -133,13 +133,12 @@ bool CSE::BuildOrderGroupAndDoReplace(const FuncGraphManagerPtr manager) const { return changed; } -bool CSE::HasRandomEffect(const AnfNodePtr &node) { +bool CSE::HasHiddenSideEffect(const AnfNodePtr &node) { auto prim = GetCNodePrimitive(node); if (prim == nullptr) { return false; } - auto attr = prim->GetAttr(GRAPH_FLAG_RANDOM_EFFECT); - return (attr != nullptr) && attr->isa<BoolImm>() && GetValue<bool>(attr); + return prim->HasAttr(GRAPH_FLAG_SIDE_EFFECT_HIDDEN); } bool CSE::CheckReplace(const AnfNodePtr &main, const AnfNodePtr &node) const { @@ -181,7 +180,7 @@ bool CSE::CheckReplace(const AnfNodePtr &main, const AnfNodePtr &node) const { return false; } // We don't merge primitive cnodes with random effect. - return !HasRandomEffect(c_main); + return !HasHiddenSideEffect(c_main); } // a parameter node. 
return false; diff --git a/mindspore/ccsrc/frontend/optimizer/cse.h b/mindspore/ccsrc/frontend/optimizer/cse.h index 55bf045bd0..d1c593c707 100644 --- a/mindspore/ccsrc/frontend/optimizer/cse.h +++ b/mindspore/ccsrc/frontend/optimizer/cse.h @@ -38,7 +38,7 @@ class CSE { virtual bool Cse(const FuncGraphPtr root, const FuncGraphManagerPtr manager) const; - static bool HasRandomEffect(const AnfNodePtr &node); + static bool HasHiddenSideEffect(const AnfNodePtr &node); protected: bool BuildOrderGroupAndDoReplaceForOneGraph(const FuncGraphPtr &fg, const FuncGraphManagerPtr &manager) const; diff --git a/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc b/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc index 04d22e1e4b..133070f13c 100644 --- a/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc +++ b/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc @@ -357,7 +357,7 @@ void GetSingleOpGraphInfo(const OpExecInfoPtr &op_exec_info, const std::vector<tensor::TensorPtr> &input_tensors, const auto &op_prim = op_exec_info->py_primitive; MS_EXCEPTION_IF_NULL(op_prim); - bool is_random_effect_op = op_prim->HasAttr(GRAPH_FLAG_RANDOM_EFFECT); + bool has_hidden_side_effect = op_prim->HasAttr(GRAPH_FLAG_SIDE_EFFECT_HIDDEN); for (size_t index = 0; index < input_tensors.size(); ++index) { MS_EXCEPTION_IF_NULL(input_tensors[index]); buf << input_tensors[index]->shape(); @@ -365,7 +365,7 @@ void GetSingleOpGraphInfo(const OpExecInfoPtr &op_exec_info, const std::vector<tensor::TensorPtr> &input_tensors, buf << input_tensors[index]->padding_type(); // In the case of the same shape, but dtype and format are inconsistent auto tensor_addr = input_tensors[index]->device_address(); - if (tensor_addr != nullptr && !is_random_effect_op) { + if (tensor_addr != nullptr && !has_hidden_side_effect) { auto p_address = std::dynamic_pointer_cast<device::DeviceAddress>(tensor_addr); MS_EXCEPTION_IF_NULL(p_address); buf << p_address->type_id(); @@ -407,8 +407,8 @@ void GetSingleOpGraphInfo(const OpExecInfoPtr &op_exec_info, const std::vector<tensor::TensorPtr> &input_tensors, buf << build_type->type_id(); } - // Random effect operator - if (is_random_effect_op) { + // Operator with hidden side effect. 
+ if (has_hidden_side_effect) { buf << "_" << std::to_string(op_prim->id()); } diff --git a/mindspore/core/utils/flags.h b/mindspore/core/utils/flags.h index 2e8bbde410..f36d454b66 100644 --- a/mindspore/core/utils/flags.h +++ b/mindspore/core/utils/flags.h @@ -22,9 +22,9 @@ namespace mindspore { inline const char GRAPH_FLAG_MIX_PRECISION_FP16[] = "fp16"; inline const char GRAPH_FLAG_MIX_PRECISION_FP32[] = "fp32"; inline const char GRAPH_FLAG_CACHE_ENABLE[] = "cache_enable"; -inline const char GRAPH_FLAG_RANDOM_EFFECT[] = "_random_effect"; inline const char GRAPH_FLAG_SIDE_EFFECT_IO[] = "side_effect_io"; inline const char GRAPH_FLAG_SIDE_EFFECT_MEM[] = "side_effect_mem"; +inline const char GRAPH_FLAG_SIDE_EFFECT_HIDDEN[] = "side_effect_hidden"; inline const char GRAPH_FLAG_SIDE_EFFECT_EXCEPTION[] = "side_effect_exception"; inline const char GRAPH_FLAG_SIDE_EFFECT_PROPAGATE[] = "side_effect_propagate"; inline const char GRAPH_FLAG_SIDE_EFFECT_BACKPROP[] = "side_effect_backprop"; diff --git a/mindspore/python/mindspore/ops/operations/nn_ops.py b/mindspore/python/mindspore/ops/operations/nn_ops.py index c87d0dc9d5..2d44348d15 100644 --- a/mindspore/python/mindspore/ops/operations/nn_ops.py +++ b/mindspore/python/mindspore/ops/operations/nn_ops.py @@ -3178,7 +3178,7 @@ class DropoutGenMask(Primitive): self.init_prim_io_names(inputs=['shape', 'keep_prob'], outputs=['output']) validator.check_value_type("Seed0", Seed0, [int], self.name) validator.check_value_type("Seed1", Seed1, [int], self.name) - self.add_prim_attr("_random_effect", True) + self.add_prim_attr("side_effect_hidden", True) class DropoutDoMask(Primitive): diff --git a/mindspore/python/mindspore/ops/operations/other_ops.py b/mindspore/python/mindspore/ops/operations/other_ops.py index 338de6ff07..33ebe2a7bf 100644 --- a/mindspore/python/mindspore/ops/operations/other_ops.py +++ b/mindspore/python/mindspore/ops/operations/other_ops.py @@ -678,6 +678,7 @@ class Push(PrimitiveWithInfer): """Initialize Push""" 
self.add_prim_attr("primitive_target", "CPU") self.init_prim_io_names(inputs=['optim_inputs', 'optim_input_shapes'], outputs=['key']) + self.add_prim_attr("side_effect_hidden", True) def infer_shape(self, inputs, shapes): return [1] diff --git a/mindspore/python/mindspore/ops/operations/random_ops.py b/mindspore/python/mindspore/ops/operations/random_ops.py index ff2f0c4b95..ab7d39584c 100644 --- a/mindspore/python/mindspore/ops/operations/random_ops.py +++ b/mindspore/python/mindspore/ops/operations/random_ops.py @@ -1,4 +1,4 @@ -# Copyright 2020 Huawei Technologies Co., Ltd +# Copyright 2020-2022 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -62,7 +62,7 @@ class StandardNormal(PrimitiveWithInfer): def __init__(self, seed=0, seed2=0): """Initialize StandardNormal""" self.init_prim_io_names(inputs=['shape'], outputs=['output']) - self.add_prim_attr("_random_effect", True) + self.add_prim_attr("side_effect_hidden", True) Validator.check_non_negative_int(seed, "seed", self.name) Validator.check_non_negative_int(seed2, "seed2", self.name) @@ -119,7 +119,7 @@ class StandardLaplace(PrimitiveWithInfer): def __init__(self, seed=0, seed2=0): """Initialize StandardLaplace""" self.init_prim_io_names(inputs=['shape'], outputs=['output']) - self.add_prim_attr("_random_effect", True) + self.add_prim_attr("side_effect_hidden", True) Validator.check_value_type('seed', seed, [int], self.name) Validator.check_value_type('seed2', seed2, [int], self.name) @@ -196,7 +196,7 @@ class Gamma(PrimitiveWithInfer): def __init__(self, seed=0, seed2=0): """Initialize Gamma""" self.init_prim_io_names(inputs=['shape', 'alpha', 'beta'], outputs=['output']) - self.add_prim_attr("_random_effect", True) + self.add_prim_attr("side_effect_hidden", True) Validator.check_non_negative_int(seed, "seed", self.name) Validator.check_non_negative_int(seed2, "seed2", self.name) @@ -262,7 
+262,7 @@ class Poisson(PrimitiveWithInfer): def __init__(self, seed=0, seed2=0): """Initialize Poisson""" self.init_prim_io_names(inputs=['shape', 'mean'], outputs=['output']) - self.add_prim_attr("_random_effect", True) + self.add_prim_attr("side_effect_hidden", True) Validator.check_non_negative_int(seed, "seed", self.name) Validator.check_non_negative_int(seed2, "seed2", self.name) @@ -334,7 +334,7 @@ class UniformInt(PrimitiveWithInfer): def __init__(self, seed=0, seed2=0): """Initialize UniformInt""" self.init_prim_io_names(inputs=['shape', 'minval', 'maxval'], outputs=['output']) - self.add_prim_attr("_random_effect", True) + self.add_prim_attr("side_effect_hidden", True) Validator.check_non_negative_int(seed, "seed", self.name) Validator.check_non_negative_int(seed2, "seed2", self.name) @@ -451,7 +451,7 @@ class RandomChoiceWithMask(PrimitiveWithInfer): Validator.check_positive_int(count, "count", self.name) Validator.check_value_type('seed', seed, [int], self.name) Validator.check_value_type('seed2', seed2, [int], self.name) - self.add_prim_attr("_random_effect", True) + self.add_prim_attr("side_effect_hidden", True) def infer_shape(self, x_shape): Validator.check_int(len(x_shape), 1, Rel.GE, "input_x rank", self.name) @@ -513,7 +513,7 @@ class RandomCategorical(PrimitiveWithInfer): Validator.check_type_name("dtype", dtype, valid_values, self.name) self.init_prim_io_names(inputs=['logits', 'num_samples', 'seed'], outputs=['output']) - self.add_prim_attr("_random_effect", True) + self.add_prim_attr("side_effect_hidden", True) def __infer__(self, logits, num_samples, seed): logits_dtype = logits['dtype'] @@ -580,7 +580,7 @@ class Multinomial(PrimitiveWithInfer): Validator.check_non_negative_int(seed, "seed", self.name) Validator.check_non_negative_int(seed2, "seed2", self.name) self.init_prim_io_names(inputs=['input', 'num_sample'], outputs=['output']) - self.add_prim_attr("_random_effect", True) + self.add_prim_attr("side_effect_hidden", True) def 
__infer__(self, inputs, num_samples): input_shape = inputs["shape"]