Browse Source

Temporarily disable communication fusion in the parallel optimizer

tags/v1.1.0
Ziyan 5 years ago
parent
commit
adc92496e8
2 changed files with 4 additions and 3 deletions
  1. +2
    -1
      mindspore/ccsrc/frontend/parallel/step_parallel.cc
  2. +2
    -2
      mindspore/nn/optim/optimizer.py

+ 2
- 1
mindspore/ccsrc/frontend/parallel/step_parallel.cc View File

@@ -1287,7 +1287,8 @@ void ApplyParallelOptOnParam(TensorLayout *tensor_layout, const OperatorInfoPtr
auto allgather = cnode->input(index)->cast<CNodePtr>();
auto prim = GetValueNode<PrimitivePtr>(allgather->input(0));
auto attrs = prim->attrs();
attrs["fusion"] = MakeValue(1);
// enable fusion flag later when it's supported in backend
attrs["fusion"] = MakeValue(0);
prim->SetAttrs(attrs);
MS_LOG(INFO) << "Parallel optimizer is applied on " << parameter->ToString();
} else {


+ 2
- 2
mindspore/nn/optim/optimizer.py View File

@@ -151,8 +151,8 @@ class Optimizer(Cell):
if context.get_auto_parallel_context("enable_parallel_optimizer"):
if _get_parallel_mode() == ParallelMode.DATA_PARALLEL:
self.use_parallel = True
elif _get_parallel_mode() == ParallelMode.STAND_ALONE:
raise RuntimeError("Parallel optimizer is not supported in stand alone mode.")
elif _get_parallel_mode() in (ParallelMode.STAND_ALONE, ParallelMode.HYBRID_PARALLEL):
raise RuntimeError("Parallel optimizer is not supported in {}.".format(_get_parallel_mode()))
else:
self.use_parallel = False
else:


Loading…
Cancel
Save