Browse Source

Fix GlobalBatchNorm bug: align documented and actual default of device_num_each_group (1 → 2)

tags/v1.0.0
zhaojichen 5 years ago
parent
commit
d8ceb23e62
1 changed file with 2 additions and 2 deletions
  1. +2
    -2
      mindspore/nn/layer/normalization.py

+ 2
- 2
mindspore/nn/layer/normalization.py View File

@@ -410,7 +410,7 @@ class GlobalBatchNorm(_BatchNorm):

Args:
num_features (int): `C` from an expected input of size (N, C, H, W).
device_num_each_group (int): The number of devices in each group. Default: 1.
device_num_each_group (int): The number of devices in each group. Default: 2.
eps (float): A value added to the denominator for numerical stability. Default: 1e-5.
momentum (float): A floating hyperparameter of the momentum for the
running_mean and running_var computation. Default: 0.9.
@@ -453,7 +453,7 @@ class GlobalBatchNorm(_BatchNorm):
moving_mean_init='zeros',
moving_var_init='ones',
use_batch_statistics=None,
device_num_each_group=1):
device_num_each_group=2):
super(GlobalBatchNorm, self).__init__(num_features,
eps,
momentum,


Loading…
Cancel
Save