
!6489 adjust order of split allreduce for bert_thor

Merge pull request !6489 from wangshuangling/master
Tag: v1.0.0
mindspore-ci-bot committed 5 years ago
Commit deb961e80d
1 changed file with 1 addition and 1 deletion:
  model_zoo/official/nlp/bert_thor/run_pretrain.py (+1, -1)

@@ -153,8 +153,8 @@ def run_pretrain():
         device_num = D.get_group_size()
         rank = D.get_rank()
         ckpt_save_dir = args_opt.save_checkpoint_path + 'ckpt_' + str(rank) + '/'
-        _set_bert_all_reduce_split()
         context.reset_auto_parallel_context()
+        _set_bert_all_reduce_split()
         context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True,
                                           device_num=device_num)
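
Why the order matters: context.reset_auto_parallel_context() resets the auto-parallel attributes, including all_reduce_fusion_config, back to their defaults, so calling _set_bert_all_reduce_split() before the reset (the old order) meant the all-reduce split configuration was wiped out again before training started. The sketch below illustrates the corrected ordering against the MindSpore 1.0-era context API; the body of _set_bert_all_reduce_split and the fusion indices shown are illustrative stand-ins, not the actual values from the bert_thor script.

# Minimal sketch of the corrected ordering, assuming the MindSpore 1.0-era
# context API. _set_bert_all_reduce_split here is a simplified stand-in for
# the helper in run_pretrain.py; the fusion indices are illustrative only.
from mindspore import context
from mindspore.context import ParallelMode
import mindspore.communication.management as D


def _set_bert_all_reduce_split():
    # Fuse gradient all-reduce ops into groups split at these parameter
    # indices (illustrative values, not the ones used by bert_thor).
    context.set_auto_parallel_context(all_reduce_fusion_config=[38, 77])


D.init()
device_num = D.get_group_size()

# reset_auto_parallel_context() restores the auto-parallel options,
# including all_reduce_fusion_config, to their defaults. Configuring the
# split only after the reset (the new order) keeps it in effect.
context.reset_auto_parallel_context()
_set_bert_all_reduce_split()
context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL,
                                  gradients_mean=True,
                                  device_num=device_num)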



