Browse Source

Fix doc error

tags/v1.1.0
huangxinjing 5 years ago
parent
commit
2fa6a3b3c2
2 changed files with 8 additions and 4 deletions
  1. +4
    -4
      mindspore/context.py
  2. +4
    -0
      mindspore/parallel/_auto_parallel_context.py

+ 4
- 4
mindspore/context.py View File

@@ -333,7 +333,7 @@ def _context():
strategy_ckpt_save_file=str, full_batch=bool, enable_parallel_optimizer=bool,
all_reduce_fusion_config=list, pipeline_stages=int)
def set_auto_parallel_context(**kwargs):
"""
r"""
Set auto parallel context.

Auto parallel context should be configured before the initialization of your network.
@@ -349,15 +349,15 @@ def set_auto_parallel_context(**kwargs):
Some configurations are parallel mode specific, see the below table for details:

=========================== =========================== =================
Common AUTO_PARALLEL DATA_PRALLEL
Common AUTO_PARALLEL DATA_PARALLEL
=========================== =========================== =================
device_num gradient_fp32_sync enable_parallel_optimizer
global_rank loss_repeated_mean
gradients_mean auto_parallel_search_mode
parallel_mode strategy_ckpt_load_file
all_reduce_fusion_config strategy_ckpt_save_file
full_batch
pipeline_stages
\ full_batch
\ pipeline_stages
=========================== =========================== =================

Args:


+ 4
- 0
mindspore/parallel/_auto_parallel_context.py View File

@@ -552,6 +552,10 @@ def _set_auto_parallel_context(**kwargs):
full_batch (bool): Whether to load the whole batch on each device. Default: False.
enable_parallel_optimizer (bool): Enable using optimizer segmentation or not. Default: False.
all_reduce_fusion_config (list): Set allreduce fusion strategy by parameter indices.
pipeline_stages (int): Set the stage information for pipeline parallel. This indicates how
the devices are distributed along the pipeline. The total devices will be divided into
'pipeline_stages' stages. This currently could only be used when
parallel mode semi_auto_parallel is enabled. Default: 0

Raises:
ValueError: If input key is not attribute in auto parallel context.


Loading…
Cancel
Save