From 9d6ff7ffd04777d1bf30f8b2ee23a6b54b3f2db3 Mon Sep 17 00:00:00 2001
From: zhaojichen
Date: Wed, 22 Apr 2020 21:41:15 -0400
Subject: [PATCH] fix batchnorm bug

---
 mindspore/nn/layer/normalization.py | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/mindspore/nn/layer/normalization.py b/mindspore/nn/layer/normalization.py
index b9e9d6ebb7..3ef2381ba1 100644
--- a/mindspore/nn/layer/normalization.py
+++ b/mindspore/nn/layer/normalization.py
@@ -366,15 +366,15 @@ class GlobalBatchNorm(_BatchNorm):
                  use_batch_statistics=True,
                  group=1):
         super(GlobalBatchNorm, self).__init__(num_features,
-                                               eps,
-                                               momentum,
-                                               affine,
-                                               gamma_init,
-                                               beta_init,
-                                               moving_mean_init,
-                                               moving_var_init,
-                                               use_batch_statistics,
-                                               group)
+                                              eps,
+                                              momentum,
+                                              affine,
+                                              gamma_init,
+                                              beta_init,
+                                              moving_mean_init,
+                                              moving_var_init,
+                                              use_batch_statistics,
+                                              group)
         self.group = check_int_positive(group)
         if self.group <= 1:
             raise ValueError("the number of group must be greater than 1.")
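
For reviewers, a minimal usage sketch of the touched class (illustrative only, not part of the patch): the num_features and group values below are arbitrary, and GlobalBatchNorm synchronizes batch statistics across devices, so a real run needs the distributed communication backend initialized beforehand.

    # Illustrative sketch only, not part of the patch. Assumes MindSpore is
    # installed and distributed communication has been initialized, since
    # GlobalBatchNorm syncs batch statistics across devices.
    from mindspore import nn

    # group must be greater than 1, otherwise the constructor raises the
    # ValueError shown as context in the hunk above.
    global_bn = nn.GlobalBatchNorm(num_features=64, group=2)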