diff --git a/mindspore/ccsrc/debug/info.cc b/mindspore/ccsrc/debug/info.cc
index 406bd11fab..f58522cf33 100644
--- a/mindspore/ccsrc/debug/info.cc
+++ b/mindspore/ccsrc/debug/info.cc
@@ -126,10 +126,10 @@ int64_t DebugInfo::debug_id() {
 }
 
 int64_t DebugInfo::unique_id_through_copy() const {
-  TraceInfoPtr trace_info = const_cast<DebugInfo *>(this)->trace_info();
-  if (trace_info != nullptr) {
-    if (trace_info->isa<TraceCopy>() && trace_info->debug_info() != nullptr) {
-      return trace_info->debug_info()->unique_id_through_copy();
+  auto info = trace_info();
+  if (info != nullptr) {
+    if (info->isa<TraceCopy>() && info->debug_info() != nullptr) {
+      return info->debug_info()->unique_id_through_copy();
     }
   }
   return unique_id();
diff --git a/mindspore/ccsrc/debug/info.h b/mindspore/ccsrc/debug/info.h
index 9ed216277e..c09c6031b3 100644
--- a/mindspore/ccsrc/debug/info.h
+++ b/mindspore/ccsrc/debug/info.h
@@ -118,7 +118,7 @@ class TraceContext {
   void set_location(const LocationPtr &loc) { location_ = loc; }
   LocationPtr location() { return location_; }
   void set_trace_info(const TraceInfoPtr &trace_info) { trace_info_ = trace_info; }
-  TraceInfoPtr trace_info() { return trace_info_; }
+  TraceInfoPtr trace_info() const { return trace_info_; }
   void set_func_name(const std::string &func_name) { func_name_ = func_name; }
   std::string func_name() { return func_name_; }
 };
@@ -139,7 +139,7 @@ class DebugInfo : public Base {
   std::string get_id() { return std::to_string(debug_id()); }
 
   void set_trace_info(const TraceInfoPtr &trace_info) { trace_info_ = trace_info; }
-  TraceInfoPtr trace_info() { return trace_info_; }
+  TraceInfoPtr trace_info() const { return trace_info_; }
   void set_location(const LocationPtr &loc) { location_ = loc; }
   virtual LocationPtr location() { return location_; }
   std::string name() { return name_; }
diff --git a/mindspore/ccsrc/ir/func_graph.h b/mindspore/ccsrc/ir/func_graph.h
index 1a367bde92..ca8e27c428 100644
--- a/mindspore/ccsrc/ir/func_graph.h
+++ b/mindspore/ccsrc/ir/func_graph.h
@@ -57,9 +57,6 @@ class AbstractFunction;
 using AbstractFunctionPtr = std::shared_ptr<AbstractFunction>;
 }  // namespace abstract
 
-class FuncGraphManager;
-using FuncGraphManagerPtr = std::shared_ptr<FuncGraphManager>;
-
 // ANF transform class
 // either a primitive or a func_graph
 class FuncGraphTransform {
diff --git a/mindspore/ccsrc/ir/manager.cc b/mindspore/ccsrc/ir/manager.cc
index 19ca940ff2..2a76cecd64 100644
--- a/mindspore/ccsrc/ir/manager.cc
+++ b/mindspore/ccsrc/ir/manager.cc
@@ -464,7 +464,7 @@ void FuncGraphManager::MoveAllCNodeDropGraph(FuncGraphPtr source, FuncGraphPtr t
   }
 }
 
-inline void FuncGraphManager::AddEdge(AnfNodePtr node, int index, AnfNodePtr input) {
+void FuncGraphManager::AddEdge(AnfNodePtr node, int index, AnfNodePtr input) {
   auto fg = node->func_graph();
   if (input->isa<ValueNode>()) {
     fg->AddValueNode(input);
@@ -485,7 +485,7 @@ inline void FuncGraphManager::AddEdge(AnfNodePtr node, int index, AnfNodePtr inp
   }
 }
 
-inline void FuncGraphManager::DropEdge(AnfNodePtr node, int index, AnfNodePtr input) {
+void FuncGraphManager::DropEdge(AnfNodePtr node, int index, AnfNodePtr input) {
   auto fg = node->func_graph();
   if (input->isa<ValueNode>()) {
     fg->DropValueNode(input);
@@ -506,7 +506,7 @@ inline void FuncGraphManager::DropEdge(AnfNodePtr node, int index, AnfNodePtr in
   }
 }
 
-inline void FuncGraphManager::MoveAllNodes(FuncGraphPtr source, FuncGraphPtr target) {
+void FuncGraphManager::MoveAllNodes(FuncGraphPtr source, FuncGraphPtr target) {
   target->CopyNodes(source);
   target->CopyValueNodes(source);
   target->CopyFuncGraphCNodesIndex(source);
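Note on the hunks above: the info.h/info.cc change is a const-correctness cleanup. Once trace_info() is const-qualified, a const member function such as DebugInfo::unique_id_through_copy() can call it directly and the const_cast disappears. A minimal sketch of the pattern, with illustrative names (Info, TraceInfo) rather than the real MindSpore classes:

  #include <cstdint>
  #include <memory>

  class TraceInfo {};
  using TraceInfoPtr = std::shared_ptr<TraceInfo>;

  class Info {
   public:
    // Const-qualified accessor: callable from const member functions, so no const_cast is needed.
    TraceInfoPtr trace_info() const { return trace_info_; }

    std::int64_t unique_id() const {
      auto info = trace_info();        // fine: trace_info() is const
      return info != nullptr ? 1 : 0;  // placeholder; the real code follows the trace chain
    }

   private:
    TraceInfoPtr trace_info_;
  };

The manager.cc hunks drop `inline` from FuncGraphManager member functions that are defined out of line in a single .cc file. An inline function must be defined in every translation unit that uses it, so keeping the keyword on a definition that exists only in manager.cc invites link-time surprises for callers in other files; a plain non-inline definition gives one ordinary external definition.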
diff --git a/mindspore/ccsrc/ir/tensor.cc b/mindspore/ccsrc/ir/tensor.cc
index fce4bc1bfc..537c23dc29 100644
--- a/mindspore/ccsrc/ir/tensor.cc
+++ b/mindspore/ccsrc/ir/tensor.cc
@@ -28,9 +28,7 @@
 #include "pipeline/static_analysis/abstract_value.h"
 
 namespace mindspore {
-
 namespace tensor {
-
 void DataBuf2Contiguous(const py::array &src, py::array *const dest) {
   if (dest == nullptr) {
     MS_LOG(EXCEPTION) << "Failed to copy data to a contiguous buffer as dest is nullptr!";
@@ -493,6 +491,5 @@ REGISTER_PYBIND_DEFINE(Tensor, ([](const py::module *m) {
                            .def("dtype", &MetaTensor::Dtype, "Get the MetaTensor's dtype.")
                            .def("shape", &MetaTensor::shape, "Get the MetaTensor's shape.");
                        }));
-
 }  // namespace tensor
 }  // namespace mindspore
diff --git a/mindspore/ccsrc/ir/tensor.h b/mindspore/ccsrc/ir/tensor.h
index 7b3725f5f0..700dcd4910 100644
--- a/mindspore/ccsrc/ir/tensor.h
+++ b/mindspore/ccsrc/ir/tensor.h
@@ -34,9 +34,7 @@ namespace py = pybind11;
 using float16 = Eigen::half;
 
 namespace pybind11 {
-
 namespace detail {
-
 // Similar to enums in `pybind11/numpy.h`. Determined by doing:
 // python3 -c 'import numpy as np; print(np.dtype(np.float16).num)'
 constexpr int NPY_FLOAT16 = 23;
@@ -85,7 +83,6 @@
 template <> struct type_caster<float16> : public npy_scalar_caster<float16> {
   static constexpr auto name = "float16";
 };
-
 }  // namespace detail
 }  // namespace pybind11
 
@@ -96,7 +93,6 @@ using DeviceAddressPtr = std::shared_ptr<DeviceAddress>;
 // mindspore namespace is the top level namespace of the MindSpore project.
 // Other namespace should be a sub namespace of mindspore namespace in the ME project.
 namespace mindspore {
-
 // brief mindspore::tensor namespace
 //
 // A sub namespace in ME to support tensor related definition.
@@ -273,7 +269,6 @@ class Tensor : public MetaTensor {
 
 using TensorPtr = std::shared_ptr<Tensor>;
 using TensorPtrList = std::vector<std::shared_ptr<Tensor>>;
-
 }  // namespace tensor
 }  // namespace mindspore
 
diff --git a/mindspore/ccsrc/minnie/param_value_minnie.h b/mindspore/ccsrc/minnie/param_value_minnie.h
index 684d8abd5d..da2b157503 100644
--- a/mindspore/ccsrc/minnie/param_value_minnie.h
+++ b/mindspore/ccsrc/minnie/param_value_minnie.h
@@ -39,6 +39,5 @@ class ParamValueMinnie : public ParamValue {
 };
 
 using ParamValueMinniePtr = std::shared_ptr<ParamValueMinnie>;
-
 }  // namespace mindspore
 #endif  // MINDSPORE_CCSRC_MINNIE_PARAM_VALUE_MINNIE_H_
diff --git a/mindspore/ccsrc/minnie/tensor_minnie.h b/mindspore/ccsrc/minnie/tensor_minnie.h
index 1d4ff705d2..25d94b7076 100644
--- a/mindspore/ccsrc/minnie/tensor_minnie.h
+++ b/mindspore/ccsrc/minnie/tensor_minnie.h
@@ -70,7 +70,6 @@ class TensorMinnie : public MetaTensor {
 };
 
 using TensorMinniePtr = std::shared_ptr<TensorMinnie>;
-
 }  // namespace tensor
 }  // namespace mindspore
 
diff --git a/mindspore/ccsrc/operator/composite/multitype_funcgraph.cc b/mindspore/ccsrc/operator/composite/multitype_funcgraph.cc
index e3957d044a..88b3134508 100644
--- a/mindspore/ccsrc/operator/composite/multitype_funcgraph.cc
+++ b/mindspore/ccsrc/operator/composite/multitype_funcgraph.cc
@@ -39,7 +39,6 @@
 namespace mindspore {
 // namespace to support composite operators definition
 namespace prim {
-
 MultitypeFuncGraph::MultitypeFuncGraph(const std::string &name) : MetaFuncGraph(name) {
   fn_cache_.clear();
   signatures_ = std::vector<Signature>({// def multitype(*args:ref):
@@ -148,6 +147,5 @@ REGISTER_PYBIND_DEFINE(MultitypeFuncGraph_, ([](const py::module *m) {
                            .def(py::init())
                            .def("register_fn", &MultitypeFuncGraph::PyRegister);
                        }));
-
 }  // namespace prim
 }  // namespace mindspore
diff --git a/mindspore/ccsrc/operator/composite/multitype_funcgraph.h b/mindspore/ccsrc/operator/composite/multitype_funcgraph.h
index b38625d62c..feb38f17ba 100644
--- a/mindspore/ccsrc/operator/composite/multitype_funcgraph.h
+++ b/mindspore/ccsrc/operator/composite/multitype_funcgraph.h
@@ -34,7 +34,6 @@
 namespace mindspore {
 // namespace to support composite operators definition
 namespace prim {
-
 class MultitypeFuncGraph : public MetaFuncGraph {
  public:
   explicit MultitypeFuncGraph(const std::string &name);
@@ -59,7 +58,6 @@ class MultitypeFuncGraph : public MetaFuncGraph {
   std::unordered_map fn_cache_py_;
 };
 using MultitypeFuncGraphPtr = std::shared_ptr<MultitypeFuncGraph>;
-
 }  // namespace prim
 }  // namespace mindspore
 
diff --git a/mindspore/ccsrc/optimizer/opt.cc b/mindspore/ccsrc/optimizer/opt.cc
index 96b9d28f9b..c9f5803902 100644
--- a/mindspore/ccsrc/optimizer/opt.cc
+++ b/mindspore/ccsrc/optimizer/opt.cc
@@ -88,7 +88,7 @@ AnfNodePtr Substitution::operator()(const OptimizerPtr &optimizer, const AnfNode
   return result;
 }
 
-inline bool isTraversable(const AnfNodePtr &node) {
+static bool isTraversable(const AnfNodePtr &node) {
   if (node == nullptr) {
     return false;
   }
diff --git a/mindspore/ccsrc/utils/primitive_utils.cc b/mindspore/ccsrc/utils/primitive_utils.cc
index cfbfdebac7..d1b9e1af12 100644
--- a/mindspore/ccsrc/utils/primitive_utils.cc
+++ b/mindspore/ccsrc/utils/primitive_utils.cc
@@ -41,7 +41,7 @@ py::function GetComputeFunction(std::string name) {
   if (!py::hasattr(mod, common::SafeCStr(name))) {
     PyErr_SetString(PyExc_NotImplementedError, common::SafeCStr(name));
     // If raise AttributeError, user can't understand. This case need raise NotImplementedError.
-    throw py::error_already_set();
+    throw(py::error_already_set());
   }
   py::object fn = mod.attr(common::SafeCStr(name));
   return fn;
diff --git a/mindspore/ccsrc/vm/vm.cc b/mindspore/ccsrc/vm/vm.cc
index 7107212b6c..42daf3417f 100644
--- a/mindspore/ccsrc/vm/vm.cc
+++ b/mindspore/ccsrc/vm/vm.cc
@@ -619,7 +619,7 @@ void FinalVM::SyncData(const py::object &arg) {
 BaseRef FinalVM::RunHook(const PrimitivePtr &prim, const VectorRef &args) {
   MS_LOG(DEBUG) << "input for operation:";
   std::size_t args_size = args.size();
-  py::tuple py_args = py::tuple(args_size);
+  auto py_args = py::tuple(args_size);
   size_t i = 0;
   for (auto &arg : args) {
     py_args[i] = BaseRefToPyData(arg);
@@ -643,7 +643,7 @@ BaseRef FinalVM::RunHook(const PrimitivePtr &prim, const VectorRef &args) {
     std::string cell_id = GetValue<std::string>(prim->GetAttr("cell_id"));
     if (_hook_grad.find(cell_id) != _hook_grad.end()) {
       std::size_t hook_args_size = 3;
-      py::tuple hook_args = py::tuple(hook_args_size);
+      auto hook_args = py::tuple(hook_args_size);
       hook_args[0] = cell_id;
       hook_args[1] = py::make_tuple(_hook_grad[cell_id]);
       hook_args[2] = py::make_tuple(py_args[2]);
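Note on the last group of hunks: the opt.cc change replaces `inline` with `static` on a file-local helper, which is the more precise tool, since `static` (or an unnamed namespace) gives the function internal linkage and keeps its symbol private to that translation unit. A small sketch of the idea, not the MindSpore function itself; the file and function names here (some_pass.cc, IsNonNull, CountNonNull) are illustrative:

  // some_pass.cc -- a file-local predicate with internal linkage.
  #include <cstddef>

  // `static` keeps the symbol private to this translation unit, so another .cc
  // defining its own same-named helper cannot clash at link time.
  static bool IsNonNull(const int *node) { return node != nullptr; }

  std::size_t CountNonNull(const int *a, const int *b) {
    return static_cast<std::size_t>(IsNonNull(a)) + static_cast<std::size_t>(IsNonNull(b));
  }

The remaining hunks are stylistic: in vm.cc, `auto py_args = py::tuple(args_size);` deduces the same py::tuple type while avoiding the repeated type name, and in primitive_utils.cc the thrown expression is merely parenthesized; `throw(py::error_already_set());` behaves identically to `throw py::error_already_set();`.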