diff --git a/mindspore/nn/layer/normalization.py b/mindspore/nn/layer/normalization.py index f8a914967e..9971a6cf5d 100644 --- a/mindspore/nn/layer/normalization.py +++ b/mindspore/nn/layer/normalization.py @@ -365,10 +365,16 @@ class BatchNorm2d(_BatchNorm): The values of str refer to the function `initializer` including 'zeros', 'ones', etc. Default: 'zeros'. moving_var_init (Union[Tensor, str, Initializer, numbers.Number]): Initializer for the moving variance. The values of str refer to the function `initializer` including 'zeros', 'ones', etc. Default: 'ones'. - use_batch_statistics (bool): If true, use the mean value and variance value of current batch data. If false, - use the mean value and variance value of specified value. If None, the training process will use the mean - and variance of current batch data and track the running mean and variance, the evaluation process will use - the running mean and variance. Default: None. + use_batch_statistics (bool): + + - If true, use the mean value and variance value of current batch data and track the running mean + and running variance. + - If false, use the specified mean value and variance value, and do not track statistical values. + - If None, use_batch_statistics is automatically assigned according to + the training and eval mode. During training, the batchnorm2d process will be the same + as with use_batch_statistics=True. Conversely, in eval, the batchnorm2d process will be the same + as with use_batch_statistics=False. + data_format (str): The optional value for data format, is 'NHWC' or 'NCHW'. Default: 'NCHW'. diff --git a/mindspore/nn/sparse/sparse.py b/mindspore/nn/sparse/sparse.py index ce50905cf8..d6cad5d56f 100644 --- a/mindspore/nn/sparse/sparse.py +++ b/mindspore/nn/sparse/sparse.py @@ -29,20 +29,23 @@ class SparseToDense(Cell): Returns: Tensor, the tensor converted. + Supported Platforms: + ``CPU`` + Examples: - >>> class SparseToDenseCell(nn.Cell): - ... def __init__(self, dense_shape): - ... 
super(SparseToDenseCell, self).__init__() - ... self.dense_shape = dense_shape - ... self.sparse_to_dense = nn.SparseToDense() - ... def construct(self, indices, values): - ... sparse = SparseTensor(indices, values, self.dense_shape) - ... return self.sparse_to_dense(sparse) - ... + >>> import mindspore as ms + >>> from mindspore import Tensor, SparseTensor + >>> import mindspore.nn as nn >>> indices = Tensor([[0, 1], [1, 2]]) - >>> values = Tensor([1, 2], dtype=ms.float32) + >>> values = Tensor([1, 2], dtype=ms.int32) >>> dense_shape = (3, 4) - >>> SparseToDenseCell(dense_shape)(indices, values) + >>> sparse_tensor = SparseTensor(indices, values, dense_shape) + >>> sparse_to_dense = nn.SparseToDense() + >>> result = sparse_to_dense(sparse_tensor) + >>> print(result) + [[0 1 0 0] + [0 0 2 0] + [0 0 0 0]] """ def __init__(self): super(SparseToDense, self).__init__()