diff --git a/mindspore/ccsrc/debug/info.h b/mindspore/ccsrc/debug/info.h
index a34d6e3df5..e8d02827d8 100644
--- a/mindspore/ccsrc/debug/info.h
+++ b/mindspore/ccsrc/debug/info.h
@@ -134,7 +134,7 @@ class DebugInfo : public Base {
   explicit DebugInfo(const LocationPtr &loc);
-  virtual ~DebugInfo() = default;
+  ~DebugInfo() override = default;
   MS_DECLARE_PARENT(DebugInfo, Base);
   int64_t debug_id();
   int64_t unique_id() const { return unique_id_; }
diff --git a/mindspore/ccsrc/debug/trace.cc b/mindspore/ccsrc/debug/trace.cc
index 16ce77725e..a78d8446d8 100644
--- a/mindspore/ccsrc/debug/trace.cc
+++ b/mindspore/ccsrc/debug/trace.cc
@@ -231,10 +231,10 @@ std::string AnalyzedFuncGraphExporter::GetNodeType(const AnfNodePtr &node) {
   auto engine = node_cfg_->engine();
   auto cfg = engine->MakeConfig(node, ctx);
   auto abs = engine->cache().GetValue(cfg);
-
   if (abs == nullptr) {
     return "Undefined";
   }
+
   auto dtype = abs->BuildType();
   auto shape = abs->BuildShape();
   std::ostringstream oss;
diff --git a/mindspore/ccsrc/debug/trace_info.h b/mindspore/ccsrc/debug/trace_info.h
index e7a8c83dad..85eae0e958 100644
--- a/mindspore/ccsrc/debug/trace_info.h
+++ b/mindspore/ccsrc/debug/trace_info.h
@@ -321,7 +321,7 @@ class TraceTransform : public TraceInfo {
   std::string full_name() override { return full_name_ + transform_name_; }
   MS_DECLARE_PARENT(TraceTransform, TraceInfo);
-  virtual std::string symbol() {
+  std::string symbol() override {
     if (transform_name_.empty()) {
       return "";
     }
diff --git a/mindspore/ccsrc/ir/dtype/type.cc b/mindspore/ccsrc/ir/dtype/type.cc
index 30bf0c8e3f..56954495df 100644
--- a/mindspore/ccsrc/ir/dtype/type.cc
+++ b/mindspore/ccsrc/ir/dtype/type.cc
@@ -87,6 +87,12 @@ const char *MetaIdLabel(const TypeId &v) {
     case kMetaTypeExternal:
      return "kMetaTypeExternal";
     case kMetaTypeNone:
      return "kMetaTypeNone";
+    case kMetaTypeNull:
+      return "kMetaTypeNull";
+    case kMetaTypeEllipsis:
+      return "kMetaTypeEllipsis";
+    case kMetaTypeEnd:
+      return "kMetaTypeEnd";
     default:
      return "[Unknown Type Id]";
   }
diff --git a/mindspore/ccsrc/optimizer/irpass.cc b/mindspore/ccsrc/optimizer/irpass.cc
index be9c8f787a..3b44700e1c 100644
--- a/mindspore/ccsrc/optimizer/irpass.cc
+++ b/mindspore/ccsrc/optimizer/irpass.cc
@@ -133,7 +133,6 @@ ResolveIRPassLib::ResolveIRPassLib() {
 InferenceOptPrepareLib::InferenceOptPrepareLib() {
   grad_var_prepare_ = MakeSubstitution(GradVarPrepare(), "grad_var_prepare", IsCNode);
 }
-
 }  // namespace irpass
 }  // namespace opt
 }  // namespace mindspore
diff --git a/mindspore/ccsrc/optimizer/irpass.h b/mindspore/ccsrc/optimizer/irpass.h
index 00274bdcc8..0af22c5cd0 100644
--- a/mindspore/ccsrc/optimizer/irpass.h
+++ b/mindspore/ccsrc/optimizer/irpass.h
@@ -159,7 +159,6 @@ inline bool IsCNodeDup(const AnfNodePtr &node) {
   }
   return false;
 }
-
 }  // namespace irpass
 }  // namespace opt
 }  // namespace mindspore
diff --git a/mindspore/ccsrc/optimizer/irpass/grad_var_prepare.cc b/mindspore/ccsrc/optimizer/irpass/grad_var_prepare.cc
index 32a42bc16b..317d67e792 100644
--- a/mindspore/ccsrc/optimizer/irpass/grad_var_prepare.cc
+++ b/mindspore/ccsrc/optimizer/irpass/grad_var_prepare.cc
@@ -31,7 +31,6 @@
 namespace mindspore {
 namespace opt {
 namespace irpass {
-
 static AnfNodePtr GenerateUnpackGraphNode(std::vector<AnfNodePtr> inputs_y, FuncGraphPtr func_graph,
                                           AnfNodePtr func_node, bool is_unpack, bool sens_param) {
   MS_EXCEPTION_IF_NULL(func_graph);
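The `info.h` and `trace_info.h` hunks above (and the `abstract_function.h` hunks further down) all make the same modernization: a derived-class member that overrides a base virtual is marked `override` instead of repeating `virtual`, so the compiler verifies that a matching base virtual actually exists. A minimal illustrative sketch (not MindSpore code) of what that buys:

```cpp
#include <string>

struct Base {
  virtual ~Base() = default;
  virtual std::string symbol() { return ""; }
};

struct Derived : Base {
  ~Derived() override = default;                 // documents that ~Base() is virtual
  std::string symbol() override { return "@"; }  // compiler checks the signature matches
  // std::string symbol() const override { return "@"; }
  // ^ would fail to compile: Base has no `const` overload to override,
  //   whereas with plain `virtual` this typo would silently add a new function
};

int main() {
  Derived d;
  return d.symbol().empty() ? 1 : 0;
}
```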
diff --git a/mindspore/ccsrc/optimizer/irpass/grad_var_prepare.h b/mindspore/ccsrc/optimizer/irpass/grad_var_prepare.h
index 599d1dca17..9713017d12 100644
--- a/mindspore/ccsrc/optimizer/irpass/grad_var_prepare.h
+++ b/mindspore/ccsrc/optimizer/irpass/grad_var_prepare.h
@@ -33,7 +33,6 @@
 namespace mindspore {
 namespace opt {
 namespace irpass {
-
 // {{GradOperation, g, w}, Ys}
 // {UnPackCall, {GradOperation, g, w}, Ys}
 class GradVarPrepare : public AnfVisitor {
diff --git a/mindspore/ccsrc/pipeline/base.h b/mindspore/ccsrc/pipeline/base.h
index 8ca153f45b..57edea03a2 100644
--- a/mindspore/ccsrc/pipeline/base.h
+++ b/mindspore/ccsrc/pipeline/base.h
@@ -28,13 +28,11 @@
 namespace mindspore {
 namespace pipeline {
-
 struct ExecutorInfo {
   FuncGraphPtr func_graph;
   ResourcePtr resource;
   std::size_t arg_list_size;
 };
-
 using ExecutorInfoPtr = std::shared_ptr<ExecutorInfo>;
 
 inline std::string GetPhasePrefix(const std::string &phase) {
diff --git a/mindspore/ccsrc/pipeline/pipeline.cc b/mindspore/ccsrc/pipeline/pipeline.cc
index 7524fb9d53..251a0c2d84 100644
--- a/mindspore/ccsrc/pipeline/pipeline.cc
+++ b/mindspore/ccsrc/pipeline/pipeline.cc
@@ -101,7 +101,7 @@ py::tuple GenerateKey(const std::string &name, const std::unordered_map &info, const py::di
   size_t pos = phase.find('.');
   std::string net_id = ((pos == std::string::npos || pos == phase.size() - 1) ? phase : phase.substr(pos + 1));
   std::string phase_prefix = phase.substr(0, pos);
-
   if (phase_prefix == "export") {
     MS_LOG(INFO) << "Set DfGraphConvertor training : false";
     convertor.set_training(false);
@@ -348,7 +347,7 @@ py::object ExtractGeneralCnodeRet(const AbstractBasePtr &cnode_data, const py::tuple &data, size_t *count) {
   auto data_tp = cnode_data->cast<AbstractTuplePtr>();
   auto elements = data_tp->elements();
   size_t size = data_tp->size();
-  py::tuple tp = py::tuple(size);
+  auto tp = py::tuple(size);
   for (size_t i = 0; i < size; i++) {
     tp[i] = ExtractGeneralCnodeRet(elements[i], data, count);
   }
@@ -379,7 +378,7 @@ py::object StructureOutput(const AnfNodePtr &output_node, const py::tuple &data, size_t *count) {
   if (output_c->IsApply(prim::kPrimMakeTuple)) {
     auto input_list = output_c->inputs();
     size_t size = input_list.size();
-    py::tuple tp = py::tuple(size - 1);
+    auto tp = py::tuple(size - 1);
     for (size_t i = 1; i < size; i++) {
       tp[i - 1] = StructureOutput(input_list[i], data, count);
     }
@@ -401,11 +400,8 @@ std::shared_ptr<py::object> DoExecGraph(const FuncGraphPtr &graph, const std::vector<MeTensorPtr> &inputs,
   std::vector<GeTensorPtr> ge_outputs;
   transform::RunOptions run_options;
-
   run_options.name = phase;
-
   auto graph_runner = DfGraphManager::GetInstance().GetGraphRunner();
-
   if (graph_runner == nullptr) {
     MS_LOG(EXCEPTION) << "Can not found GraphRunner.";
   }
@@ -478,7 +474,6 @@ void ProcessGeArg(const std::map<std::string, ExecutorInfoPtr> &info, const py::
 py::object ExecDFGraph(const std::map<std::string, ExecutorInfoPtr> &info, const py::tuple &args,
                        const std::string &phase) {
   std::string phase_prefix = GetPhasePrefix(phase);
-
   if (phase_prefix == "save") {
     DoExecNonInputGraph(phase);
     ConfigManager::GetInstance().ResetConfig();
@@ -488,7 +483,6 @@ py::object ExecDFGraph(const std::map<std::string, ExecutorInfoPtr> &info, const py::tuple &args,
   if (info.count(phase) == 0) {
     MS_LOG(EXCEPTION) << "There is no phase:" << phase;
   }
-
   FuncGraphPtr anf_graph = info.at(phase)->func_graph;
 #ifdef ENABLE_INFER
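`ExtractGeneralCnodeRet` and `StructureOutput` above share one pattern: a `py::tuple` is sized up front and then filled element by element, recursing into nested `MakeTuple` outputs; the hunks merely shorten the declaration to `auto`. A self-contained pybind11 sketch of that pattern, with a hypothetical `Node` type standing in for the ANF output node (this is illustrative, not MindSpore's API):

```cpp
#include <pybind11/embed.h>
#include <vector>

namespace py = pybind11;

// Hypothetical stand-in for a MakeTuple-style nested output.
struct Node {
  int leaf = 0;
  std::vector<Node> children;  // non-empty => tuple node
};

py::object Structure(const Node &n) {
  if (n.children.empty()) {
    return py::int_(n.leaf);  // leaf case: a plain Python value
  }
  auto tp = py::tuple(n.children.size());  // size is fixed before filling
  for (size_t i = 0; i < n.children.size(); ++i) {
    tp[i] = Structure(n.children[i]);  // recurse, exactly as the pipeline code does
  }
  return tp;
}

int main() {
  py::scoped_interpreter guard{};
  Node n{0, {{1, {}}, {0, {{2, {}}, {3, {}}}}}};
  py::print(Structure(n));  // prints (1, (2, 3))
}
```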
diff --git a/mindspore/ccsrc/pipeline/pipeline_ge.h b/mindspore/ccsrc/pipeline/pipeline_ge.h
index 9dc1524682..f3a363dbe8 100644
--- a/mindspore/ccsrc/pipeline/pipeline_ge.h
+++ b/mindspore/ccsrc/pipeline/pipeline_ge.h
@@ -31,7 +31,6 @@
 namespace mindspore {
 namespace pipeline {
-
 namespace py = pybind11;
 
 void SetGeOption(const std::map<std::string, std::string> &options);
@@ -50,7 +49,6 @@ bool InitExecDatasetGe(const std::string &queue_name, int64_t size, int64_t batch_size,
                        const std::vector<int64_t> &input_indexes, const std::string &phase);
 void ExportDFGraph(const std::string &file_name, const std::string &phase);
-
 }  // namespace pipeline
 }  // namespace mindspore
diff --git a/mindspore/ccsrc/pipeline/static_analysis/abstract_function.h b/mindspore/ccsrc/pipeline/static_analysis/abstract_function.h
index 133d5e99a9..513b290a9d 100644
--- a/mindspore/ccsrc/pipeline/static_analysis/abstract_function.h
+++ b/mindspore/ccsrc/pipeline/static_analysis/abstract_function.h
@@ -41,7 +41,7 @@ class AbstractFuncAtom : public AbstractFunction {
   AbstractFunctionPtr Join(const AbstractFunctionPtr &other) final;
   void Visit(std::function<void(const AbstractFuncAtomPtr &)>) const final;
-  bool operator==(const AbstractFunction &other) const;
+  bool operator==(const AbstractFunction &other) const override;
   std::size_t hash() const override { return tid(); }
 };
@@ -270,7 +270,7 @@ class TypedPrimitiveAbstractClosure : public AbstractFuncAtom {
 class DummyAbstractClosure : public AbstractFuncAtom {
  public:
  DummyAbstractClosure() = default;
-  ~DummyAbstractClosure() = default;
+  ~DummyAbstractClosure() override = default;
   MS_DECLARE_PARENT(DummyAbstractClosure, AbstractFuncAtom)
   EvaluatorPtr GetEvaluator(AnalysisEnginePtr) override { MS_LOG(EXCEPTION) << "A dummy function cannot eval."; }
diff --git a/mindspore/ccsrc/pipeline/static_analysis/prim.cc b/mindspore/ccsrc/pipeline/static_analysis/prim.cc
index 1115cd9978..d71ad8f710 100644
--- a/mindspore/ccsrc/pipeline/static_analysis/prim.cc
+++ b/mindspore/ccsrc/pipeline/static_analysis/prim.cc
@@ -295,7 +295,6 @@ py::dict ConvertAbstractToPython(const AbstractBasePtr &abs_base) {
     dic["shape"] = shape;
     dic["dtype"] = arg_slice->BuildType();
     dic["value"] = BuildValue(arg_slice->BuildValue());
-
   } else if (abs_base->isa<AbstractTuple>()) {
     auto arg_tuple = dyn_cast<AbstractTuple>(abs_base);
     size_t len = arg_tuple->size();
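The `prim.cc` hunk sits inside `ConvertAbstractToPython`, which dispatches on the runtime type of an abstract value via `isa<T>()` and then downcasts with `dyn_cast<T>()`. An illustrative, self-contained sketch of that idiom (the types here are stand-ins, not MindSpore's real hierarchy):

```cpp
#include <iostream>
#include <memory>

struct AbstractBase {
  virtual ~AbstractBase() = default;
  // isa<T>() reports whether the dynamic type matches T.
  template <typename T>
  bool isa() const { return dynamic_cast<const T *>(this) != nullptr; }
};

struct AbstractSlice : AbstractBase {};
struct AbstractTuple : AbstractBase { size_t size = 2; };

// dyn_cast<T>() yields an empty pointer when the types do not match.
template <typename T>
std::shared_ptr<T> dyn_cast(const std::shared_ptr<AbstractBase> &p) {
  return std::dynamic_pointer_cast<T>(p);
}

void Convert(const std::shared_ptr<AbstractBase> &abs) {
  if (abs->isa<AbstractSlice>()) {
    std::cout << "slice branch\n";
  } else if (abs->isa<AbstractTuple>()) {
    auto tup = dyn_cast<AbstractTuple>(abs);  // safe: isa<> already matched
    std::cout << "tuple of size " << tup->size << "\n";
  }
}

int main() { Convert(std::make_shared<AbstractTuple>()); }
```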
diff --git a/mindspore/ops/operations/array_ops.py b/mindspore/ops/operations/array_ops.py
index 38504990e8..67f86bf87b 100644
--- a/mindspore/ops/operations/array_ops.py
+++ b/mindspore/ops/operations/array_ops.py
@@ -639,9 +639,9 @@ class TruncatedNormal(PrimitiveWithInfer):
         Tensor, type of output tensor is same as attribute `dtype`.
 
     Examples:
-        >>> input_shape = Tensor(np.array([1, 2, 3]))
+        >>> shape = (1, 2, 3)
         >>> truncated_normal = P.TruncatedNormal()
-        >>> output = truncated_normal(input_shape)
+        >>> output = truncated_normal(shape)
     """
 
     @prim_attr_register
@@ -652,6 +652,8 @@ class TruncatedNormal(PrimitiveWithInfer):
     def __infer__(self, shape):
         shape_value = shape['value']
+        validator.check_const_input("shape", shape_value)
+        validator.check_type("shape", shape_value, [tuple])
         for i, value in enumerate(shape_value):
             validator.check_integer(f'{i}th value of shape', value, 0, Rel.GT)
         out = {'shape': shape_value,
@@ -1642,15 +1644,16 @@ class StridedSlice(PrimitiveWithInfer):
         validator.check_type('shrink_axis_mask', shrink_axis_mask, [int])
 
     def __infer__(self, x, begin, end, strides):
-        x_shape = x['shape']
-        x_shp_len = len(x_shape)
         begin_v, end_v, strides_v = begin['value'], end['value'], strides['value']
         validator.check_const_input("begin", begin_v)
         validator.check_const_input("end", end_v)
         validator.check_const_input("strides", strides_v)
-        validator.check_type("begin", begin['value'], [tuple])
-        validator.check_type("end", end['value'], [tuple])
-        validator.check_type("strides", strides['value'], [tuple])
+        validator.check_type("begin", begin_v, [tuple])
+        validator.check_type("end", end_v, [tuple])
+        validator.check_type("strides", strides_v, [tuple])
+
+        x_shape = x['shape']
+        x_shp_len = len(x_shape)
         if len(begin_v) != x_shp_len or len(end_v) != x_shp_len or len(strides_v) != x_shp_len:
             raise ValueError(f"The length of begin index{begin_v}, end index{end_v} and strides{strides_v} "
                              f"must be equal to the dims({x_shp_len}) of input.")
diff --git a/tests/ut/python/ops/test_ops.py b/tests/ut/python/ops/test_ops.py
index 22df3d1fd3..d6622e76f4 100755
--- a/tests/ut/python/ops/test_ops.py
+++ b/tests/ut/python/ops/test_ops.py
@@ -372,7 +372,7 @@ test_case_math_ops = [
         'desc_bprop': [[3]]}),
     ('TruncatedNormal', {
         'block': P.TruncatedNormal(),
-        'desc_const': [[1, 2, 3]],
+        'desc_const': [(1, 2, 3)],
         'desc_inputs': [],
         'skip': ['backward'],
         'add_fake_input': True}),
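Both Python changes enforce the same ordering: confirm an input is a compile-time constant of the right type before consuming its value. `TruncatedNormal.__infer__` gains the const/tuple checks ahead of the per-element loop, and `StridedSlice.__infer__` moves its length comparison after the type checks. The same guard shape, sketched in C++ with hypothetical names (MindSpore's real checks are the Python `validator.check_*` helpers shown above):

```cpp
#include <cstdint>
#include <stdexcept>
#include <string>
#include <vector>

// Hypothetical analogue of TruncatedNormal.__infer__: structural checks run
// first; only afterwards are the individual values consumed.
std::vector<int64_t> CheckShape(const std::vector<int64_t> &shape) {
  if (shape.empty()) {  // stand-in for check_const_input / check_type
    throw std::invalid_argument("shape must be a non-empty constant tuple");
  }
  for (size_t i = 0; i < shape.size(); ++i) {
    if (shape[i] <= 0) {  // mirrors check_integer(..., 0, Rel.GT)
      throw std::invalid_argument(std::to_string(i) + "th value of shape must be > 0");
    }
  }
  return shape;
}

int main() {
  auto ok = CheckShape({1, 2, 3});  // passes: every dimension is positive
  return ok.size() == 3 ? 0 : 1;
}
```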