From 75ca84d260af194ea6a4dd048c1b6d1ee74601e5 Mon Sep 17 00:00:00 2001 From: Yi Huaijie Date: Thu, 14 May 2020 09:13:19 +0800 Subject: [PATCH] INFO user when set_strategy not under [semi_]auto_parallel mode --- mindspore/ccsrc/parallel/ops_info/ops_utils.h | 1 + mindspore/ccsrc/parallel/step_parallel.cc | 33 +++++++++++++++++++ mindspore/ops/primitive.py | 1 + 3 files changed, 35 insertions(+) diff --git a/mindspore/ccsrc/parallel/ops_info/ops_utils.h b/mindspore/ccsrc/parallel/ops_info/ops_utils.h index 9b7aceba86..44c504c242 100644 --- a/mindspore/ccsrc/parallel/ops_info/ops_utils.h +++ b/mindspore/ccsrc/parallel/ops_info/ops_utils.h @@ -48,6 +48,7 @@ constexpr double INF = 1e20; constexpr char AUTO_PARALLEL_RUN_ONCE_ONLY[] = "auto_parallel_run_once_only"; constexpr char SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY[] = "semi_auto_parallel_run_once_only"; +constexpr char CHECK_SET_STRATEGY_VALID_ONCE_ONLY[] = "check_set_strategy_valid_once_only"; constexpr char STRATEGY[] = "strategy"; constexpr char GEN_STRATEGY[] = "gen_strategy"; constexpr char REDUCE_OP_SUM[] = "sum"; diff --git a/mindspore/ccsrc/parallel/step_parallel.cc b/mindspore/ccsrc/parallel/step_parallel.cc index a344607362..37060045ca 100644 --- a/mindspore/ccsrc/parallel/step_parallel.cc +++ b/mindspore/ccsrc/parallel/step_parallel.cc @@ -333,6 +333,28 @@ bool StrategyFound(std::unordered_map<std::string, ValuePtr> attrs) { return !((iter == attrs.end()) || (iter->second->type_name() == NONE)); } +bool HasStrategy(const FuncGraphPtr &root) { + AnfNodePtr ret = root->get_return(); + MS_EXCEPTION_IF_NULL(ret); + std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret); + + for (auto &node : all_nodes) { + auto cnode = node->cast<CNodePtr>(); + if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) { + continue; + } + + ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>(); + PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node); + auto attrs = prim->attrs(); + if (StrategyFound(attrs)) { + return true; + } + } + + return false; +} + bool 
IsCommunicationOp(const PrimitivePtr &prim) { MS_EXCEPTION_IF_NULL(prim); return (COMMUNICATION_OPS.find(prim->name()) != COMMUNICATION_OPS.end()); @@ -2225,6 +2247,14 @@ bool StepParallel(const FuncGraphPtr &root, const opt::OptimizerPtr &optimizer) // control whether use model_parallel mode if (!root->has_flag(AUTO_PARALLEL) || ((parallel_mode != AUTO_PARALLEL) && (parallel_mode != SEMI_AUTO_PARALLEL)) || (root->has_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY))) { + if (!root->has_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY)) { + if (HasStrategy(root)) { + MS_LOG(INFO) << "strategies ignored in " << parallel_mode + << ", set_strategy() only valid in [semi_]auto_parallel."; + } + root->flags()[CHECK_SET_STRATEGY_VALID_ONCE_ONLY] = true; + } + return changes; } @@ -2282,6 +2312,9 @@ bool StepParallel(const FuncGraphPtr &root, const opt::OptimizerPtr &optimizer) root->flags()[SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY] = true; res->results()[pipeline::kStepParallelGraph] = root; + // in auto parallel mode, no need to check if strategies set + root->flags()[CHECK_SET_STRATEGY_VALID_ONCE_ONLY] = true; + (void)gettimeofday(&end_time, nullptr); uint64_t time = kUSecondInSecond * static_cast<uint64_t>(end_time.tv_sec - start_time.tv_sec); time += static_cast<uint64_t>(end_time.tv_usec - start_time.tv_usec); diff --git a/mindspore/ops/primitive.py b/mindspore/ops/primitive.py index 78e8778c52..95e148204b 100644 --- a/mindspore/ops/primitive.py +++ b/mindspore/ops/primitive.py @@ -111,6 +111,7 @@ class Primitive(Primitive_): Note: Valid only in semi auto parallel or auto parallel mode. + In other parallel modes, strategies will be ignored if set. Args: strategy (tuple): Strategy describes the distributed parallel mode of the current primitive.