Browse Source

!15696 fix bot warnings

From: @Margaret_wangrui
Reviewed-by: @ginfung,@zh_qh
Signed-off-by: @zh_qh
pull/15696/MERGE
mindspore-ci-bot Gitee 4 years ago
parent
commit
a5176a4a17
9 changed files with 8 additions and 9 deletions
  1. +1
    -1
      mindspore/ccsrc/backend/optimizer/common/fusion_id_allocator.cc
  2. +1
    -1
      mindspore/ccsrc/backend/optimizer/common/fusion_id_allocator.h
  3. +1
    -1
      mindspore/ccsrc/backend/session/ascend_auto_monad.cc
  4. +1
    -1
      mindspore/ccsrc/backend/session/session_basic.cc
  5. +1
    -1
      mindspore/ccsrc/backend/session/session_basic.h
  6. +0
    -1
      mindspore/ccsrc/frontend/optimizer/irpass/load_eliminate.h
  7. +1
    -1
      mindspore/ccsrc/pipeline/jit/static_analysis/auto_monad.cc
  8. +1
    -1
      mindspore/ccsrc/runtime/device/kernel_runtime.cc
  9. +1
    -1
      mindspore/ccsrc/runtime/device/kernel_runtime.h

+ 1
- 1
mindspore/ccsrc/backend/optimizer/common/fusion_id_allocator.cc View File

@@ -29,7 +29,7 @@ int64_t FusionIdAllocator::AllocateFusionId() {
   return fusion_id;
 }
 
-bool FusionIdAllocator::HasFusionIdAttr(const AnfNodePtr &node) {
+bool FusionIdAllocator::HasFusionIdAttr(const AnfNodePtr &node) const {
   MS_EXCEPTION_IF_NULL(node);
   if (!node->isa<CNode>()) {
     return false;


+ 1
- 1
mindspore/ccsrc/backend/optimizer/common/fusion_id_allocator.h View File

@@ -30,7 +30,7 @@ class FusionIdAllocator {


   void Init();
   int64_t AllocateFusionId();
-  bool HasFusionIdAttr(const AnfNodePtr &node);
+  bool HasFusionIdAttr(const AnfNodePtr &node) const;
   int64_t GetFusionId(const AnfNodePtr &node);
   void SetFusionId(const AnfNodePtr &node, int64_t id);




+ 1
- 1
mindspore/ccsrc/backend/session/ascend_auto_monad.cc View File

@@ -1468,7 +1468,7 @@ class ExecuteOrderGenerator {
     return input;
   }
 
-  void RemoveSameInputsAssigns(std::vector<CNodePtr> *exec_order) {
+  void RemoveSameInputsAssigns(std::vector<CNodePtr> *exec_order) const {
     for (auto iter = exec_order->begin(); iter != exec_order->end();) {
       auto &node = *iter;
       auto &inputs = node->inputs();


+ 1
- 1
mindspore/ccsrc/backend/session/session_basic.cc View File

@@ -704,7 +704,7 @@ AnfNodePtr SessionBasic::CreateNewParameterFromCNode(const AnfNodePtr &anf, Kern
   return CreateParameterFromTuple(anf, graph);
 }
 
-void SessionBasic::GetCNodeInfo(const CNodePtr &cnode, std::vector<AnfNodePtr> *cnode_inputs) {
+void SessionBasic::GetCNodeInfo(const CNodePtr &cnode, std::vector<AnfNodePtr> *cnode_inputs) const {
   MS_EXCEPTION_IF_NULL(cnode);
   MS_EXCEPTION_IF_NULL(cnode_inputs);
   auto prim = AnfAlgo::GetCNodePrimitive(cnode);


+ 1
- 1
mindspore/ccsrc/backend/session/session_basic.h View File

@@ -151,7 +151,7 @@ class SessionBasic : public std::enable_shared_from_this<SessionBasic> {
   std::vector<AnfNodePtr> CreateValueNode(const CNodePtr &cnode, KernelGraph *graph);
   void CreateCNodeInputs(const CNodePtr &cnode, KernelGraph *graph, std::vector<AnfNodePtr> *cnode_inputs);
   std::vector<AnfNodePtr> CreateCallSwitchInputs(const CNodePtr &cnode, KernelGraph *graph);
-  void GetCNodeInfo(const CNodePtr &cnode, std::vector<AnfNodePtr> *cnode_inputs);
+  void GetCNodeInfo(const CNodePtr &cnode, std::vector<AnfNodePtr> *cnode_inputs) const;
   void GetNewCNodeInputs(const CNodePtr &cnode, KernelGraph *graph, std::vector<AnfNodePtr> *cnode_inputs,
                          std::unordered_map<AnfNodePtr, AnfNodePtr> *other_graph_cnode);
   std::vector<AnfNodePtr> CreateCallSwitchLayerInputs(const CNodePtr &cnode, KernelGraph *graph);


+ 0
- 1
mindspore/ccsrc/frontend/optimizer/irpass/load_eliminate.h View File

@@ -28,6 +28,5 @@ class LoadEliminater : public AnfVisitor {
  public:
   AnfNodePtr operator()(const OptimizerPtr &, const AnfNodePtr &node) override;
 };
-
 }  // namespace mindspore::opt::irpass
 #endif  // MINDSPORE_CCSRC_FRONTEND_OPTIMIZER_IRPASS_LOAD_ELIMINATE_H_

+ 1
- 1
mindspore/ccsrc/pipeline/jit/static_analysis/auto_monad.cc View File

@@ -67,7 +67,7 @@ AnfNodePtr AddMonadParameter(const FuncGraphPtr &func_graph, const std::string &
   // If io monad parameter added before u monad parameter, should insert u monad before io monad in parameters
   if (io_monad_location != params_size && abs->isa<abstract::AbstractUMonad>()) {
     std::vector<AnfNodePtr> params = func_graph->parameters();
-    (void)params.insert(params.begin() + io_monad_location, para);
+    (void)params.insert(params.begin() + SizeToInt(io_monad_location), para);
     func_graph->set_parameters(params);
   } else {
     func_graph->add_parameter(para);


+ 1
- 1
mindspore/ccsrc/runtime/device/kernel_runtime.cc View File

@@ -80,7 +80,7 @@ void KernelRuntime::RunOpAssignMemory(const std::vector<tensor::TensorPtr> &inpu
   UpdateRefNodeOutputMem(graph);
 }
 
-void KernelRuntime::RunOpClearMemory(const session::KernelGraph *graph) {
+void KernelRuntime::RunOpClearMemory(const session::KernelGraph *graph) const {
   MS_EXCEPTION_IF_NULL(graph);
   // clear input parameter memory resource
   for (const auto &input_node : graph->inputs()) {


+ 1
- 1
mindspore/ccsrc/runtime/device/kernel_runtime.h View File

@@ -54,7 +54,7 @@ class KernelRuntime {
   virtual bool Init() = 0;
   virtual void AssignMemory(session::KernelGraph *graph);
   void RunOpAssignMemory(const std::vector<tensor::TensorPtr> &input_tensors, session::KernelGraph *graph);
-  void RunOpClearMemory(const session::KernelGraph *graph);
+  void RunOpClearMemory(const session::KernelGraph *graph) const;
   static bool DumpDataEnabled();
   static bool DumpDataEnabledIteration();
   virtual bool LoadData(session::KernelGraph *graph);


Loading…
Cancel
Save