From 2fa6a3b3c29c6cefefe8e05f8d7fad3f232e3568 Mon Sep 17 00:00:00 2001 From: huangxinjing Date: Mon, 28 Sep 2020 17:08:49 +0800 Subject: [PATCH] Fix doc error --- mindspore/context.py | 8 ++++---- mindspore/parallel/_auto_parallel_context.py | 4 ++++ 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/mindspore/context.py b/mindspore/context.py index cd2ed2c1d8..84b2761908 100644 --- a/mindspore/context.py +++ b/mindspore/context.py @@ -333,7 +333,7 @@ def _context(): strategy_ckpt_save_file=str, full_batch=bool, enable_parallel_optimizer=bool, all_reduce_fusion_config=list, pipeline_stages=int) def set_auto_parallel_context(**kwargs): - """ + r""" Set auto parallel context. Auto parallel context should be configured before the initialization of your network. @@ -349,15 +349,15 @@ def set_auto_parallel_context(**kwargs): Some configurations are parallel mode specific, see the below table for details: =========================== =========================== ================= - Common AUTO_PARALLEL DATA_PRALLEL + Common AUTO_PARALLEL DATA_PARALLEL =========================== =========================== ================= device_num gradient_fp32_sync enable_parallel_optimizer global_rank loss_repeated_mean gradients_mean auto_parallel_search_mode parallel_mode strategy_ckpt_load_file all_reduce_fusion_config strategy_ckpt_save_file - full_batch - pipeline_stages + \ full_batch + \ pipeline_stages =========================== =========================== ================= Args: diff --git a/mindspore/parallel/_auto_parallel_context.py b/mindspore/parallel/_auto_parallel_context.py index 01afb012fa..67074eb576 100644 --- a/mindspore/parallel/_auto_parallel_context.py +++ b/mindspore/parallel/_auto_parallel_context.py @@ -552,6 +552,10 @@ def _set_auto_parallel_context(**kwargs): full_batch (bool): Whether to load the whole batch on each device. Default: False. enable_parallel_optimizer (bool): Enable using optimizer segmentation or not. Default: False. 
all_reduce_fusion_config (list): Set allreduce fusion strategy by parameters indices. + pipeline_stages (int): Set the stage information for pipeline parallel. This indicates how + the devices are distributed along the pipeline. The total devices will be divided into + 'pipeline_stages' stages. This currently can only be used when + parallel mode semi_auto_parallel is enabled. Default: 0 Raises: ValueError: If input key is not attribute in auto parallel context.