Browse Source

!325 Fix BatchNorm1d and BatchNorm2d docs and a slog path-duplication bug

Merge pull request !325 from fary86/fix-batchnorm-doc-and-slog-path-duplication
tags/v0.2.0-alpha
mindspore-ci-bot Gitee 5 years ago
parent
commit
dc20a1dcba
2 changed files with 9 additions and 0 deletions
  1. +7
    -0
      mindspore/ccsrc/utils/log_adapter.cc
  2. +2
    -0
      mindspore/nn/layer/normalization.py

+ 7
- 0
mindspore/ccsrc/utils/log_adapter.cc View File

@@ -96,6 +96,13 @@ static int GetGlogLevel(MsLogLevel level) {
}
}
#else

#undef Dlog
#define Dlog(module_id, level, format, ...) \
do { \
DlogInner((module_id), (level), (format), ##__VA_ARGS__); \
} while (0)

// convert MsLogLevel to corresponding slog level
static int GetSlogLevel(MsLogLevel level) {
switch (level) {


+ 2
- 0
mindspore/nn/layer/normalization.py View File

@@ -136,6 +136,7 @@ class BatchNorm1d(_BatchNorm):
eps (float): A value added to the denominator for numerical stability. Default: 1e-5.
momentum (float): A floating-point hyperparameter used as the momentum for the
running_mean and running_var computation. Default: 0.9.
affine (bool): A bool value. When set to True, gamma and beta can be learned. Default: True.
gamma_init (Union[Tensor, str, Initializer, numbers.Number]): Initializer for the gamma weight.
The values of str refer to the function `initializer` including 'zeros', 'ones', 'xavier_uniform',
'he_uniform', etc. Default: 'ones'.
@@ -187,6 +188,7 @@ class BatchNorm2d(_BatchNorm):
eps (float): A value added to the denominator for numerical stability. Default: 1e-5.
momentum (float): A floating-point hyperparameter used as the momentum for the
running_mean and running_var computation. Default: 0.9.
affine (bool): A bool value. When set to True, gamma and beta can be learned. Default: True.
gamma_init (Union[Tensor, str, Initializer, numbers.Number]): Initializer for the gamma weight.
The values of str refer to the function `initializer` including 'zeros', 'ones', 'xavier_uniform',
'he_uniform', etc. Default: 'ones'.


Loading…
Cancel
Save