Browse Source

!23328 Fix error message

Merge pull request !23328 from huangxinjing/fix_error_msg
tags/v1.5.0-rc1
i-robot Gitee 4 years ago
parent
commit
7828fb0028
2 changed files with 4 additions and 4 deletions
  1. +1
    -1
      mindspore/parallel/nn/layers.py
  2. +3
    -3
      mindspore/parallel/nn/transformer.py

+ 1
- 1
mindspore/parallel/nn/layers.py View File

@@ -89,7 +89,7 @@ def _valid_value_checks(types, class_name):
@constexpr
def _check_input_shape(input_shape, param_name, func_name, target_len):
if len(input_shape) != target_len:
-        raise ValueError(f"{func_name} {param_name} should be 2d, but got shape {input_shape}")
+        raise ValueError(f"{func_name} {param_name} should be {target_len}d, but got shape {input_shape}")
return True




+ 3
- 3
mindspore/parallel/nn/transformer.py View File

@@ -354,7 +354,7 @@ class FeedForward(Cell):
raise ValueError(f"hidden_size {hidden_size} should be a multiple of the model parallel way {mp}")
if dropout_rate < 0 or dropout_rate >= 1:
raise ValueError(f"dropout_rate probability should be a number in range [0, 1.0), "
-                             "but got {dropout_rate}")
+                             f"but got {dropout_rate}")
input_size = hidden_size
output_size = ffn_hidden_size
# Here, 'ep' stands for expert parallel number, which is equal to data parallel number.
@@ -774,10 +774,10 @@ class MultiHeadAttention(Cell):
self.batch_size = batch_size
if hidden_dropout_rate < 0 or hidden_dropout_rate >= 1:
raise ValueError(f"hidden_dropout_rate probability should be a number in range [0, 1.0), "
-                                 "but got {hidden_dropout_rate}")
+                                 f"but got {hidden_dropout_rate}")
if attention_dropout_rate < 0 or attention_dropout_rate >= 1:
raise ValueError(f"attention_dropout_rate probability should be a number in range [0, 1.0), "
-                                 "but got {attention_dropout_rate}")
+                                 f"but got {attention_dropout_rate}")
if hidden_size % num_heads != 0:
raise ValueError(f"The hidden size {hidden_size} should be a multiple of num_heads {num_heads}")
if num_heads % parallel_config.model_parallel != 0:


Loading…
Cancel
Save