diff --git a/mindspore/nn/layer/activation.py b/mindspore/nn/layer/activation.py
index 3e8f7704a2..5009006666 100644
--- a/mindspore/nn/layer/activation.py
+++ b/mindspore/nn/layer/activation.py
@@ -392,7 +392,7 @@ class GELU(Cell):
 
 
 class FastGelu(Cell):
     r"""
-    fast Gaussian error linear unit activation function.
+    Fast Gaussian error linear unit activation function.
 
     Applies FastGelu function to each element of the input. The input is a Tensor with any valid shape.
diff --git a/mindspore/nn/layer/normalization.py b/mindspore/nn/layer/normalization.py
index 32ceb70e7f..a19bdca97b 100644
--- a/mindspore/nn/layer/normalization.py
+++ b/mindspore/nn/layer/normalization.py
@@ -267,7 +267,7 @@ class BatchNorm1d(_BatchNorm):
         Tensor, the normalized, scaled, offset tensor, of shape :math:`(N, C_{out})`.
 
     Supported Platforms:
-        ``Ascend``
+        ``Ascend`` ``GPU``
 
     Examples:
         >>> net = nn.BatchNorm1d(num_features=4)
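For context, a minimal usage sketch (not part of the patch) exercising the two layers whose docstrings are touched above: `nn.FastGelu`, which is applied element-wise, and `nn.BatchNorm1d`, whose Supported Platforms entry now lists ``GPU`` alongside ``Ascend``. The input shapes and values are illustrative only, and the example assumes a MindSpore build with one of the documented backends available.

```python
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

# FastGelu is applied to each element of the input tensor, as described
# in the updated activation.py docstring.
fast_gelu = nn.FastGelu()
x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]], dtype=np.float32))
print(fast_gelu(x))

# BatchNorm1d normalizes a 2D input of shape (N, C); per the patch, the
# documented platforms are now Ascend and GPU.
bn = nn.BatchNorm1d(num_features=4)
y = Tensor(np.random.rand(2, 4).astype(np.float32))
print(bn(y))
```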