From 1397326c461d3c03901e000debbd4facdc6163c8 Mon Sep 17 00:00:00 2001
From: chenzomi
Date: Tue, 12 May 2020 11:18:16 +0800
Subject: [PATCH] add quant.py and change the format of __init__

---
 mindspore/nn/layer/__init__.py             | 42 +++++++++++-----------
 mindspore/nn/layer/activation.py           | 25 +++++++++++++
 mindspore/nn/layer/basic.py                |  1 +
 mindspore/nn/layer/container.py            |  2 +-
 mindspore/nn/layer/conv.py                 |  1 +
 mindspore/nn/layer/embedding.py            |  1 +
 mindspore/nn/layer/image.py                |  1 +
 mindspore/nn/layer/lstm.py                 |  1 +
 mindspore/nn/layer/normalization.py        |  2 ++
 mindspore/nn/layer/pooling.py              |  1 +
 mindspore/nn/layer/{_quant.py => quant.py} |  0
 11 files changed, 56 insertions(+), 21 deletions(-)
 rename mindspore/nn/layer/{_quant.py => quant.py} (100%)

diff --git a/mindspore/nn/layer/__init__.py b/mindspore/nn/layer/__init__.py
index b9f79b6cf7..20ec8e17e8 100644
--- a/mindspore/nn/layer/__init__.py
+++ b/mindspore/nn/layer/__init__.py
@@ -17,24 +17,26 @@
 Layer.
 
 The high-level components(Cells) used to construct the neural network.
 """
-from .activation import Softmax, LogSoftmax, ReLU, ReLU6, Tanh, GELU, ELU, Sigmoid, PReLU, get_activation, LeakyReLU, HSigmoid, HSwish
-from .normalization import BatchNorm1d, BatchNorm2d, LayerNorm, GroupNorm, GlobalBatchNorm
-from .container import SequentialCell, CellList
-from .conv import Conv2d, Conv2dTranspose
-from .lstm import LSTM
-from .basic import Dropout, Flatten, Dense, ClipByNorm, Norm, OneHot, Pad, Unfold
-from .embedding import Embedding
-from .pooling import AvgPool2d, MaxPool2d, AvgPool1d
-from .image import ImageGradients, SSIM, PSNR
+from . import activation, normalization, container, conv, lstm, basic, embedding, pooling, image, quant
+from .activation import *
+from .normalization import *
+from .container import *
+from .conv import *
+from .lstm import *
+from .basic import *
+from .embedding import *
+from .pooling import *
+from .image import *
+from .quant import *
 
-__all__ = ['Softmax', 'LogSoftmax', 'ReLU', 'ReLU6', 'Tanh', 'GELU', 'Sigmoid',
-           'PReLU', 'get_activation', 'LeakyReLU', 'HSigmoid', 'HSwish', 'ELU',
-           'BatchNorm1d', 'BatchNorm2d', 'LayerNorm', 'GroupNorm', 'GlobalBatchNorm',
-           'SequentialCell', 'CellList',
-           'Conv2d', 'Conv2dTranspose',
-           'LSTM',
-           'Dropout', 'Flatten', 'Dense', 'ClipByNorm', 'Norm', 'OneHot',
-           'Embedding',
-           'AvgPool2d', 'MaxPool2d', 'AvgPool1d', 'Pad', 'Unfold',
-           'ImageGradients', 'SSIM', 'PSNR',
-           ]
+__all__ = []
+__all__.extend(activation.__all__)
+__all__.extend(normalization.__all__)
+__all__.extend(container.__all__)
+__all__.extend(conv.__all__)
+__all__.extend(lstm.__all__)
+__all__.extend(basic.__all__)
+__all__.extend(embedding.__all__)
+__all__.extend(pooling.__all__)
+__all__.extend(image.__all__)
+__all__.extend(quant.__all__)

diff --git a/mindspore/nn/layer/activation.py b/mindspore/nn/layer/activation.py
index 8845247a65..e05466c32c 100644
--- a/mindspore/nn/layer/activation.py
+++ b/mindspore/nn/layer/activation.py
@@ -22,6 +22,21 @@
 from mindspore.common.tensor import Tensor
 from mindspore._extends import cell_attr_register
 from ..cell import Cell
 
+__all__ = ['Softmax',
+           'LogSoftmax',
+           'ReLU',
+           'ReLU6',
+           'Tanh',
+           'GELU',
+           'Sigmoid',
+           'PReLU',
+           'get_activation',
+           'LeakyReLU',
+           'HSigmoid',
+           'HSwish',
+           'ELU',
+           ]
+
 
 class Softmax(Cell):
     r"""
@@ -49,6 +64,7 @@ class Softmax(Cell):
     Tensor, which has the same type and shape as `x` with values in the range[0,1].
""" + def __init__(self, axis=-1): super(Softmax, self).__init__() self.softmax = P.Softmax(axis) @@ -112,6 +128,7 @@ class ELU(Cell): Tensor, with the same type and shape as the `input_data`. """ + def __init__(self, alpha=1.0): super(ELU, self).__init__() self.elu = P.Elu(alpha) @@ -135,6 +152,7 @@ class ReLU(Cell): Tensor, with the same type and shape as the `input_data`. """ + def __init__(self): super(ReLU, self).__init__() self.relu = P.ReLU() @@ -158,6 +176,7 @@ class ReLU6(Cell): Tensor, which has the same type with `input_data`. """ + def __init__(self): super(ReLU6, self).__init__() self.relu6 = P.ReLU6() @@ -189,6 +208,7 @@ class LeakyReLU(Cell): Tensor, has the same type and shape with the `input_x`. """ + def __init__(self, alpha=0.2): super(LeakyReLU, self).__init__() self.greater_equal = P.GreaterEqual() @@ -225,6 +245,7 @@ class Tanh(Cell): Tensor, with the same type and shape as the `input_data`. """ + def __init__(self): super(Tanh, self).__init__() self.tanh = P.Tanh() @@ -250,6 +271,7 @@ class GELU(Cell): Tensor, with the same type and shape as the `input_data`. """ + def __init__(self): super(GELU, self).__init__() self.gelu = P.Gelu() @@ -274,6 +296,7 @@ class Sigmoid(Cell): Tensor, with the same type and shape as the `input_data`. """ + def __init__(self): super(Sigmoid, self).__init__() self.sigmoid = P.Sigmoid() @@ -352,6 +375,7 @@ class HSwish(Cell): Tensor, with the same type and shape as the `input_data`. """ + def __init__(self): super(HSwish, self).__init__() self.hswish = P.HSwish() @@ -380,6 +404,7 @@ class HSigmoid(Cell): Tensor, with the same type and shape as the `input_data`. """ + def __init__(self): super(HSigmoid, self).__init__() self.hsigmoid = P.HSigmoid() diff --git a/mindspore/nn/layer/basic.py b/mindspore/nn/layer/basic.py index 9c8de85a68..95f1d4d284 100644 --- a/mindspore/nn/layer/basic.py +++ b/mindspore/nn/layer/basic.py @@ -29,6 +29,7 @@ from ..cell import Cell from .activation import get_activation from ..._checkparam import Validator as validator +__all__ = ['Dropout', 'Flatten', 'Dense', 'ClipByNorm', 'Norm', 'OneHot', 'Pad', 'Unfold'] class Dropout(Cell): r""" diff --git a/mindspore/nn/layer/container.py b/mindspore/nn/layer/container.py index 709b3ef8ef..5cb378ae6c 100644 --- a/mindspore/nn/layer/container.py +++ b/mindspore/nn/layer/container.py @@ -15,9 +15,9 @@ """container""" from collections import OrderedDict from abc import abstractmethod, ABCMeta - from ..cell import Cell +__all__ = ['SequentialCell', 'CellList'] def _valid_index(cell_num, index): if not isinstance(index, int): diff --git a/mindspore/nn/layer/conv.py b/mindspore/nn/layer/conv.py index 730b5e3398..017fb0fcd7 100644 --- a/mindspore/nn/layer/conv.py +++ b/mindspore/nn/layer/conv.py @@ -21,6 +21,7 @@ from mindspore._checkparam import check_bool, twice, check_int_positive, check_i from mindspore._extends import cell_attr_register from ..cell import Cell +__all__ = ['Conv2d', 'Conv2dTranspose'] class _Conv(Cell): """ diff --git a/mindspore/nn/layer/embedding.py b/mindspore/nn/layer/embedding.py index 24b94f2f3c..5df38b6845 100755 --- a/mindspore/nn/layer/embedding.py +++ b/mindspore/nn/layer/embedding.py @@ -21,6 +21,7 @@ from mindspore.common.initializer import initializer from ..cell import Cell from ..._checkparam import Validator as validator +__all__ = ['Embedding'] class Embedding(Cell): r""" diff --git a/mindspore/nn/layer/image.py b/mindspore/nn/layer/image.py index f06c5fd30a..6a21820788 100644 --- a/mindspore/nn/layer/image.py +++ 
@@ -23,6 +23,7 @@
 from mindspore._checkparam import Validator as validator
 from mindspore._checkparam import Rel
 from ..cell import Cell
 
+__all__ = ['ImageGradients', 'SSIM', 'PSNR']
 
 class ImageGradients(Cell):
     r"""

diff --git a/mindspore/nn/layer/lstm.py b/mindspore/nn/layer/lstm.py
index bdc49739ac..06ad5154ab 100755
--- a/mindspore/nn/layer/lstm.py
+++ b/mindspore/nn/layer/lstm.py
@@ -19,6 +19,7 @@
 from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
 from mindspore._checkparam import Validator as validator
 
+__all__ = ['LSTM']
 
 class LSTM(Cell):
     r"""

diff --git a/mindspore/nn/layer/normalization.py b/mindspore/nn/layer/normalization.py
index d133e9f0cb..7144c647f1 100644
--- a/mindspore/nn/layer/normalization.py
+++ b/mindspore/nn/layer/normalization.py
@@ -29,6 +29,8 @@
 from mindspore._checkparam import check_int_positive
 from ..cell import Cell
 
+__all__ = ['BatchNorm1d', 'BatchNorm2d', 'LayerNorm', 'GroupNorm', 'GlobalBatchNorm']
+
 
 class _BatchNorm(Cell):
     """Batch Normalization base class."""
     @cell_attr_register

diff --git a/mindspore/nn/layer/pooling.py b/mindspore/nn/layer/pooling.py
index b0c0816ca4..927af43225 100644
--- a/mindspore/nn/layer/pooling.py
+++ b/mindspore/nn/layer/pooling.py
@@ -21,6 +21,7 @@
 from ... import context
 from ..cell import Cell
 from ..._checkparam import Rel
 
+__all__ = ['AvgPool2d', 'MaxPool2d', 'AvgPool1d']
 
 class _PoolNd(Cell):
     """N-D AvgPool"""

diff --git a/mindspore/nn/layer/_quant.py b/mindspore/nn/layer/quant.py
similarity index 100%
rename from mindspore/nn/layer/_quant.py
rename to mindspore/nn/layer/quant.py
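
Note for reviewers: a minimal standalone sketch of the export-aggregation pattern this patch introduces in __init__.py. The pkg/foo names below are hypothetical, for illustration only; they are not part of the patch.

    # pkg/foo.py -- each submodule declares its public surface in __all__.
    __all__ = ['Foo']

    class Foo:
        """A public class exported by the submodule."""

    # pkg/__init__.py -- the package re-exports whatever each submodule
    # lists in __all__, so `from pkg import *` picks up new symbols
    # automatically instead of relying on a hand-maintained export list.
    from . import foo
    from .foo import *

    __all__ = []
    __all__.extend(foo.__all__)

With this layout, exposing a new Cell only requires adding its name to the owning submodule's __all__; the package-level export list follows automatically. The rename from _quant.py to quant.py fits the same scheme: dropping the leading underscore marks the module as public by convention, so its __all__ can be aggregated alongside the other layer modules.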