From ae325f2e538c1bfc22ad8982909a1e36086225c8 Mon Sep 17 00:00:00 2001
From: lihongkang <lihongkang1@huawei.com>
Date: Sat, 16 Jan 2021 16:36:05 +0800
Subject: [PATCH] fix bugs

---
 mindspore/nn/layer/activation.py    | 2 +-
 mindspore/nn/layer/normalization.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/mindspore/nn/layer/activation.py b/mindspore/nn/layer/activation.py
index 3e8f7704a2..5009006666 100644
--- a/mindspore/nn/layer/activation.py
+++ b/mindspore/nn/layer/activation.py
@@ -392,7 +392,7 @@ class GELU(Cell):
 
 class FastGelu(Cell):
     r"""
-    fast Gaussian error linear unit activation function.
+    Fast Gaussian error linear unit activation function.
 
     Applies FastGelu function to each element of the input. The input is a Tensor with any valid shape.
 
diff --git a/mindspore/nn/layer/normalization.py b/mindspore/nn/layer/normalization.py
index 0d1bae5a19..518741b7e9 100644
--- a/mindspore/nn/layer/normalization.py
+++ b/mindspore/nn/layer/normalization.py
@@ -284,7 +284,7 @@ class BatchNorm1d(_BatchNorm):
         Tensor, the normalized, scaled, offset tensor, of shape :math:`(N, C_{out})`.
 
     Supported Platforms:
-        ``Ascend``
+        ``Ascend`` ``GPU``
 
     Examples:
         >>> net = nn.BatchNorm1d(num_features=4)
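
For reference, a minimal usage sketch of the two layers touched by this patch, written in the same doctest style as the docstrings above. It is not taken from the patch; it assumes the MindSpore 1.x nn API (nn.FastGelu, nn.BatchNorm1d) and uses illustrative shapes.

    >>> import numpy as np
    >>> import mindspore
    >>> import mindspore.nn as nn
    >>> from mindspore import Tensor
    >>> # FastGelu applies the fast GELU activation elementwise,
    >>> # so the output shape matches the input shape.
    >>> fast_gelu = nn.FastGelu()
    >>> x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]), mindspore.float32)
    >>> fast_gelu(x).shape
    (2, 3)
    >>> # BatchNorm1d normalizes a 2-D input of shape (N, C) over the batch axis.
    >>> bn = nn.BatchNorm1d(num_features=4)
    >>> y = Tensor(np.ones((8, 4)), mindspore.float32)
    >>> bn(y).shape
    (8, 4)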