Browse Source

Change default value of single-loop flag

tags/v1.6.0
Xiaoda Zhang 4 years ago
parent
commit
1bdb610b34
5 changed files with 8 additions and 2 deletions
  1. +1
    -1
      mindspore/ccsrc/frontend/parallel/costmodel_context.h
  2. +2
    -0
      tests/ut/python/parallel/test_auto_parallel_for_loop.py
  3. +2
    -0
      tests/ut/python/parallel/test_auto_parallel_for_loop_multi_subgraph.py
  4. +2
    -0
      tests/ut/python/parallel/test_auto_parallel_for_loop_reshape.py
  5. +1
    -1
      tests/ut/python/parallel/test_auto_parallel_two_matmul.py

+ 1
- 1
mindspore/ccsrc/frontend/parallel/costmodel_context.h View File

@@ -46,7 +46,7 @@ namespace parallel {
#define DEFAULT_TRIANGLE_STAR_STRATEGY_OVERWRITE true;
#define DEFAULT_DP_ALGO_ENABLE_APPROX false
#define DEFAULT_DP_ALGO_APPROX_EPSILON 0.1
#define DEFAULT_DP_ALGO_SINGLE_LOOP true
#define DEFAULT_DP_ALGO_SINGLE_LOOP false

class CostModelContext {
public:


+ 2
- 0
tests/ut/python/parallel/test_auto_parallel_for_loop.py View File

@@ -22,6 +22,7 @@ from mindspore.ops import operations as P, functional as F
from mindspore.common.initializer import initializer
import mindspore.common.dtype as mstype
from mindspore.common.api import _cell_graph_executor
from mindspore.parallel._cost_model_context import _set_algo_single_loop
from tests.dataset_mock import MindData


@@ -119,6 +120,7 @@ _w1 = Tensor(np.ones([512, 128]), dtype=ms.float32)

def test_auto_parallel():
context.set_auto_parallel_context(parallel_mode="auto_parallel", device_num=16, global_rank=0)
_set_algo_single_loop(True)
net = Full(_w1, 3)
net.set_auto_parallel()
net.set_train()


+ 2
- 0
tests/ut/python/parallel/test_auto_parallel_for_loop_multi_subgraph.py View File

@@ -25,6 +25,7 @@ from mindspore.ops import functional as F
from mindspore.ops import operations as P
from mindspore.parallel._cost_model_context import _set_multi_subgraphs
from mindspore.parallel._utils import _reset_op_id as reset_op_id
from mindspore.parallel._cost_model_context import _set_algo_single_loop


class SubNet(nn.Cell):
@@ -122,6 +123,7 @@ class TrainStepWarp(nn.Cell):

def test_double_subgraphs():
context.set_auto_parallel_context(parallel_mode="auto_parallel", device_num=8, global_rank=0)
_set_algo_single_loop(True)
net = TrainStepWarp(NetWithLoss(Net()))
_set_multi_subgraphs()
net.set_auto_parallel()


+ 2
- 0
tests/ut/python/parallel/test_auto_parallel_for_loop_reshape.py View File

@@ -22,6 +22,7 @@ from mindspore.ops import operations as P, functional as F
from mindspore.common.initializer import initializer
import mindspore.common.dtype as mstype
from mindspore.common.api import _cell_graph_executor
from mindspore.parallel._cost_model_context import _set_algo_single_loop
from tests.dataset_mock import MindData


@@ -126,6 +127,7 @@ _w1 = Tensor(np.ones([512, 128, 1]), dtype=ms.float32)

def test_auto_parallel():
context.set_auto_parallel_context(parallel_mode="auto_parallel", device_num=16, global_rank=0)
_set_algo_single_loop(True)
net = Full(_w1, 3)
net.set_auto_parallel()
net.set_train()


+ 1
- 1
tests/ut/python/parallel/test_auto_parallel_two_matmul.py View File

@@ -122,7 +122,7 @@ def test_two_matmul():
algo_epsilon = get_algo_parameters("algo_approxi_epsilon")
assert algo_epsilon == 0.001

expecte_single_loop = True
expecte_single_loop = False
signle_loop = _get_algo_single_loop()
assert expecte_single_loop == signle_loop
expecte_single_loop = False


Loading…
Cancel
Save