diff --git a/mindspore/nn/layer/__init__.py b/mindspore/nn/layer/__init__.py
index b9f79b6cf7..20ec8e17e8 100644
--- a/mindspore/nn/layer/__init__.py
+++ b/mindspore/nn/layer/__init__.py
@@ -17,24 +17,26 @@ Layer.
 
 The high-level components(Cells) used to construct the neural network.
 """
-from .activation import Softmax, LogSoftmax, ReLU, ReLU6, Tanh, GELU, ELU, Sigmoid, PReLU, get_activation, LeakyReLU, HSigmoid, HSwish
-from .normalization import BatchNorm1d, BatchNorm2d, LayerNorm, GroupNorm, GlobalBatchNorm
-from .container import SequentialCell, CellList
-from .conv import Conv2d, Conv2dTranspose
-from .lstm import LSTM
-from .basic import Dropout, Flatten, Dense, ClipByNorm, Norm, OneHot, Pad, Unfold
-from .embedding import Embedding
-from .pooling import AvgPool2d, MaxPool2d, AvgPool1d
-from .image import ImageGradients, SSIM, PSNR
+from . import activation, normalization, container, conv, lstm, basic, embedding, pooling, image, quant
+from .activation import *
+from .normalization import *
+from .container import *
+from .conv import *
+from .lstm import *
+from .basic import *
+from .embedding import *
+from .pooling import *
+from .image import *
+from .quant import *
 
-__all__ = ['Softmax', 'LogSoftmax', 'ReLU', 'ReLU6', 'Tanh', 'GELU', 'Sigmoid',
-           'PReLU', 'get_activation', 'LeakyReLU', 'HSigmoid', 'HSwish', 'ELU',
-           'BatchNorm1d', 'BatchNorm2d', 'LayerNorm', 'GroupNorm', 'GlobalBatchNorm',
-           'SequentialCell', 'CellList',
-           'Conv2d', 'Conv2dTranspose',
-           'LSTM',
-           'Dropout', 'Flatten', 'Dense', 'ClipByNorm', 'Norm', 'OneHot',
-           'Embedding',
-           'AvgPool2d', 'MaxPool2d', 'AvgPool1d', 'Pad', 'Unfold',
-           'ImageGradients', 'SSIM', 'PSNR',
-           ]
+__all__ = []
+__all__.extend(activation.__all__)
+__all__.extend(normalization.__all__)
+__all__.extend(container.__all__)
+__all__.extend(conv.__all__)
+__all__.extend(lstm.__all__)
+__all__.extend(basic.__all__)
+__all__.extend(embedding.__all__)
+__all__.extend(pooling.__all__)
+__all__.extend(image.__all__)
+__all__.extend(quant.__all__)
diff --git a/mindspore/nn/layer/activation.py b/mindspore/nn/layer/activation.py
index d61bbeaadd..ef217ea11e 100644
--- a/mindspore/nn/layer/activation.py
+++ b/mindspore/nn/layer/activation.py
@@ -22,6 +22,21 @@ from mindspore.common.tensor import Tensor
 from mindspore._extends import cell_attr_register
 from ..cell import Cell
 
+__all__ = ['Softmax',
+           'LogSoftmax',
+           'ReLU',
+           'ReLU6',
+           'Tanh',
+           'GELU',
+           'Sigmoid',
+           'PReLU',
+           'get_activation',
+           'LeakyReLU',
+           'HSigmoid',
+           'HSwish',
+           'ELU',
+           ]
+
 
 class Softmax(Cell):
     r"""
@@ -54,6 +69,7 @@ class Softmax(Cell):
         >>> softmax(input_x)
         [0.03168  0.01166  0.0861  0.636  0.2341]
     """
+
     def __init__(self, axis=-1):
         super(Softmax, self).__init__()
         self.softmax = P.Softmax(axis)
@@ -128,6 +144,7 @@ class ELU(Cell):
         >>> elu(input_x)
 
     """
+
     def __init__(self, alpha=1.0):
         super(ELU, self).__init__()
         self.elu = P.Elu(alpha)
@@ -156,6 +173,7 @@ class ReLU(Cell):
         >>> relu(input_x)
         [0. 2. 0. 2. 0.]
     """
+
     def __init__(self):
         super(ReLU, self).__init__()
         self.relu = P.ReLU()
@@ -184,6 +202,7 @@ class ReLU6(Cell):
         >>> relu6(input_x)
         [0. 0. 0. 2. 1.]
     """
+
     def __init__(self):
         super(ReLU6, self).__init__()
         self.relu6 = P.ReLU6()
@@ -221,6 +240,7 @@ class LeakyReLU(Cell):
         [[-0.2  4.  -1.6]
          [ 2.  -1.   9.]]
     """
+
     def __init__(self, alpha=0.2):
         super(LeakyReLU, self).__init__()
         self.greater_equal = P.GreaterEqual()
@@ -262,6 +282,7 @@ class Tanh(Cell):
         >>> tanh(input_x)
         [0.7617 0.964  0.995  0.964  0.7617]
     """
+
     def __init__(self):
         super(Tanh, self).__init__()
         self.tanh = P.Tanh()
@@ -293,6 +314,7 @@ class GELU(Cell):
         [[-1.5880802e-01  3.9999299e+00 -3.1077917e-21]
          [ 1.9545976e+00 -2.2918017e-07  9.0000000e+00]]
     """
+
     def __init__(self):
         super(GELU, self).__init__()
         self.gelu = P.Gelu()
@@ -322,6 +344,7 @@ class Sigmoid(Cell):
         >>> sigmoid(input_x)
         [0.2688  0.11914  0.5  0.881  0.7305]
     """
+
     def __init__(self):
         super(Sigmoid, self).__init__()
         self.sigmoid = P.Sigmoid()
@@ -410,6 +433,7 @@ class HSwish(Cell):
         >>> hswish(input_x)
 
     """
+
     def __init__(self):
         super(HSwish, self).__init__()
         self.hswish = P.HSwish()
@@ -443,6 +467,7 @@ class HSigmoid(Cell):
         >>> hsigmoid(input_x)
 
     """
+
     def __init__(self):
         super(HSigmoid, self).__init__()
         self.hsigmoid = P.HSigmoid()
diff --git a/mindspore/nn/layer/basic.py b/mindspore/nn/layer/basic.py
index 6d8d287b82..77b9afbf05 100644
--- a/mindspore/nn/layer/basic.py
+++ b/mindspore/nn/layer/basic.py
@@ -30,6 +30,7 @@ from ..cell import Cell
 from .activation import get_activation
 from ..._checkparam import Validator as validator
 
+__all__ = ['Dropout', 'Flatten', 'Dense', 'ClipByNorm', 'Norm', 'OneHot', 'Pad', 'Unfold']
 
 class Dropout(Cell):
     r"""
diff --git a/mindspore/nn/layer/container.py b/mindspore/nn/layer/container.py
index 709b3ef8ef..5cb378ae6c 100644
--- a/mindspore/nn/layer/container.py
+++ b/mindspore/nn/layer/container.py
@@ -15,9 +15,9 @@
 """container"""
 from collections import OrderedDict
 from abc import abstractmethod, ABCMeta
-
 from ..cell import Cell
 
+__all__ = ['SequentialCell', 'CellList']
 
 def _valid_index(cell_num, index):
     if not isinstance(index, int):
diff --git a/mindspore/nn/layer/conv.py b/mindspore/nn/layer/conv.py
index a102a394be..fd3039a348 100644
--- a/mindspore/nn/layer/conv.py
+++ b/mindspore/nn/layer/conv.py
@@ -21,6 +21,7 @@ from mindspore._checkparam import check_bool, twice, check_int_positive, check_i
 from mindspore._extends import cell_attr_register
 from ..cell import Cell
 
+__all__ = ['Conv2d', 'Conv2dTranspose']
 
 class _Conv(Cell):
     """
diff --git a/mindspore/nn/layer/embedding.py b/mindspore/nn/layer/embedding.py
index 24b94f2f3c..5df38b6845 100755
--- a/mindspore/nn/layer/embedding.py
+++ b/mindspore/nn/layer/embedding.py
@@ -21,6 +21,7 @@ from mindspore.common.initializer import initializer
 from ..cell import Cell
 from ..._checkparam import Validator as validator
 
+__all__ = ['Embedding']
 
 class Embedding(Cell):
     r"""
diff --git a/mindspore/nn/layer/image.py b/mindspore/nn/layer/image.py
index 7448d5fd49..7d8eef4d6f 100644
--- a/mindspore/nn/layer/image.py
+++ b/mindspore/nn/layer/image.py
@@ -23,6 +23,7 @@ from mindspore._checkparam import Validator as validator
 from mindspore._checkparam import Rel
 from ..cell import Cell
 
+__all__ = ['ImageGradients', 'SSIM', 'PSNR']
 
 class ImageGradients(Cell):
     r"""
diff --git a/mindspore/nn/layer/lstm.py b/mindspore/nn/layer/lstm.py
index bdc49739ac..06ad5154ab 100755
--- a/mindspore/nn/layer/lstm.py
+++ b/mindspore/nn/layer/lstm.py
@@ -19,6 +19,7 @@ from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
 from mindspore._checkparam import Validator as validator
 
+__all__ = ['LSTM']
 
 class LSTM(Cell):
     r"""
diff --git a/mindspore/nn/layer/normalization.py b/mindspore/nn/layer/normalization.py
index 16b1e9b786..a423b86c5f 100644
--- a/mindspore/nn/layer/normalization.py
+++ b/mindspore/nn/layer/normalization.py
@@ -29,6 +29,8 @@ from mindspore._checkparam import check_int_positive
 from ..cell import Cell
 
+__all__ = ['BatchNorm1d', 'BatchNorm2d', 'LayerNorm', 'GroupNorm', 'GlobalBatchNorm']
+
 
 class _BatchNorm(Cell):
     """Batch Normalization base class."""
     @cell_attr_register
diff --git a/mindspore/nn/layer/pooling.py b/mindspore/nn/layer/pooling.py
index b0c0816ca4..927af43225 100644
--- a/mindspore/nn/layer/pooling.py
+++ b/mindspore/nn/layer/pooling.py
@@ -21,6 +21,7 @@ from ... import context
 from ..cell import Cell
 from ..._checkparam import Rel
 
+__all__ = ['AvgPool2d', 'MaxPool2d', 'AvgPool1d']
 
 class _PoolNd(Cell):
     """N-D AvgPool"""
diff --git a/mindspore/nn/layer/_quant.py b/mindspore/nn/layer/quant.py
similarity index 100%
rename from mindspore/nn/layer/_quant.py
rename to mindspore/nn/layer/quant.py
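
For reviewers, a minimal sketch of the re-export pattern this diff applies. The package and module names below (`pkg`, `act`) are hypothetical stand-ins, not MindSpore code; the point is that each submodule declares its public API in `__all__`, the package `__init__` wildcard-imports it and aggregates the lists, so `from pkg import ReLU` keeps working while underscore-prefixed helpers stay hidden:

```python
# pkg/act.py -- a submodule declaring its public API
__all__ = ['ReLU']            # only names listed here are exported by `import *`

class ReLU:
    """Toy stand-in for an activation cell."""
    def __call__(self, x):
        return max(x, 0.0)

def _helper():
    """Not listed in __all__, so wildcard imports skip it."""

# pkg/__init__.py -- aggregate the submodule APIs
from . import act            # keep the submodule reachable as pkg.act
from .act import *           # re-export its public names at package level

__all__ = []
__all__.extend(act.__all__)  # package API = union of submodule __all__ lists
```

One consequence worth noting: because `__init__.py` now wildcard-imports every submodule, renaming `_quant.py` to `quant.py` and wiring it in here publishes the quantization cells under `mindspore.nn.layer`, and any name later added to a submodule's `__all__` becomes public automatically, without touching `__init__.py` again.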