Merge pull request !7077 from chenzhongming/zomi_master
@@ -16,13 +16,13 @@
 import numpy as np
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
+from mindspore.ops import _selected_ops
 from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
 from mindspore.common.tensor import Tensor
 from mindspore._extends import cell_attr_register
-from mindspore.ops import _selected_ops
+from mindspore._checkparam import Validator as validator
 from ..cell import Cell
-from ..._checkparam import Validator as validator

 __all__ = ['Softmax',
@@ -20,7 +20,6 @@ import mindspore.common.dtype as mstype
 from mindspore.common.seed import get_seed
 from mindspore.common.tensor import Tensor
 from mindspore.common.initializer import initializer
-from mindspore._checkparam import check_int_positive, check_bool
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore.ops.functional import identity
@@ -28,12 +27,12 @@ from mindspore.ops.operations import _inner_ops as inner
 from mindspore.ops.primitive import constexpr
 from mindspore.common.parameter import Parameter
 from mindspore._extends import cell_attr_register
+from mindspore._checkparam import Rel, Validator as validator, check_int_positive, check_bool
 from mindspore.common.api import ms_function
 from mindspore import context
 from ..cell import Cell
 from .activation import get_activation
-from ..._checkparam import Validator as validator
-from ..._checkparam import Rel

 __all__ = ['Dropout', 'Flatten', 'Dense', 'ClipByNorm', 'Norm', 'OneHot', 'Pad', 'Unfold',
            'MatrixDiag', 'MatrixDiagPart', 'MatrixSetDiag']
@@ -13,7 +13,6 @@
 # limitations under the License.
 # ============================================================================
 """conv"""
 import numpy as np
 from mindspore import log as logger
 from mindspore import context
@@ -23,8 +22,7 @@ from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer, Initializer
 from mindspore.common.tensor import Tensor
 from mindspore._checkparam import ParamValidator as validator, Rel
-from mindspore._checkparam import Validator
-from mindspore._checkparam import check_bool, twice, check_int_positive
+from mindspore._checkparam import check_bool, twice, check_int_positive, Validator
 from mindspore._extends import cell_attr_register
 from ..cell import Cell
@@ -18,12 +18,11 @@ from mindspore.common.tensor import Tensor
 from mindspore.ops import operations as P
 from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
-from mindspore._checkparam import Validator
 from mindspore.communication.management import get_group_size
 from mindspore.context import ParallelMode
 from mindspore.parallel._utils import _get_parallel_mode
+from mindspore._checkparam import Rel, Validator as validator
 from ..cell import Cell
-from ..._checkparam import Validator as validator, Rel

 __all__ = ['Embedding', 'EmbeddingLookup']
@@ -171,7 +170,7 @@ class EmbeddingLookup(Cell):
             if not isinstance(manual_shapes, tuple):
                 raise TypeError("manual_shapes type must be tuple(int) cannot be {}!".format(type(manual_shapes)))
             for dim in manual_shapes:
-                Validator.check_integer('manul shape dim', dim, 0, Rel.GT, self.cls_name)
+                validator.check_integer('manul shape dim', dim, 0, Rel.GT, self.cls_name)
             self.gatherv2.add_prim_attr("manual_split", manual_shapes)
             self.embeddinglookup.add_prim_attr("manual_split", manual_shapes)
             self.gatherv2.shard(((get_group_size(), 1), (1, get_group_size())))
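The renamed call keeps the old semantics: manual_shapes must be a tuple whose entries are strictly positive integers (checked against 0 with Rel.GT). A minimal plain-Python sketch of the same check, using a hypothetical helper name rather than the MindSpore validator:

def check_manual_shapes(manual_shapes):
    # Reject non-tuples, mirroring the TypeError raised in the hunk above.
    if not isinstance(manual_shapes, tuple):
        raise TypeError("manual_shapes type must be tuple(int) cannot be {}!".format(type(manual_shapes)))
    # Every dimension must be a strictly positive integer, as
    # validator.check_integer(..., 0, Rel.GT, ...) enforces.
    for dim in manual_shapes:
        if not isinstance(dim, int) or dim <= 0:
            raise ValueError("manual shape dim must be a positive int, but got {}".format(dim))
    return manual_shapes

check_manual_shapes((20, 30, 50))  # passes: e.g. three vocabulary slices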
@@ -20,8 +20,7 @@ from mindspore.common.tensor import Tensor
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore.ops.primitive import constexpr
-from mindspore._checkparam import Validator as validator
-from mindspore._checkparam import Rel
+from mindspore._checkparam import Rel, Validator as validator
 from .conv import Conv2d
 from .container import CellList
 from .pooling import AvgPool2d
@@ -14,16 +14,14 @@
 # ============================================================================
 """lstm"""
 import math
 import numpy as np
-from mindspore._checkparam import Validator as validator
+from mindspore._checkparam import Rel, Validator as validator
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 from mindspore.common.tensor import Tensor
 from mindspore.nn.cell import Cell
 from mindspore.ops import operations as P
-from ..._checkparam import Rel

 __all__ = ['LSTM', 'LSTMCell']
@@ -32,7 +30,7 @@ class LSTM(Cell):
     r"""
     LSTM (Long Short-Term Memory) layer.

-    Applies a LSTM to the input.
+    Apply LSTM layer to the input.

     There are two pipelines connecting two consecutive cells in a LSTM model; one is cell state pipeline
     and the other is hidden state pipeline. Denote two consecutive time nodes as :math:`t-1` and :math:`t`.
@@ -88,25 +86,11 @@ class LSTM(Cell):
           (num_directions * `num_layers`, batch_size, `hidden_size`).

     Examples:
-        >>> class LstmNet(nn.Cell):
-        >>>     def __init__(self, input_size, hidden_size, num_layers, has_bias, batch_first, bidirectional):
-        >>>         super(LstmNet, self).__init__()
-        >>>         self.lstm = nn.LSTM(input_size=input_size,
-        >>>                             hidden_size=hidden_size,
-        >>>                             num_layers=num_layers,
-        >>>                             has_bias=has_bias,
-        >>>                             batch_first=batch_first,
-        >>>                             bidirectional=bidirectional,
-        >>>                             dropout=0.0)
-        >>>
-        >>>     def construct(self, inp, h0, c0):
-        >>>         return self.lstm(inp, (h0, c0))
-        >>>
-        >>> net = LstmNet(10, 12, 2, has_bias=True, batch_first=True, bidirectional=False)
+        >>> net = nn.LSTM(10, 12, 2, has_bias=True, batch_first=True, bidirectional=False)
         >>> input = Tensor(np.ones([3, 5, 10]).astype(np.float32))
         >>> h0 = Tensor(np.ones([1 * 2, 3, 12]).astype(np.float32))
         >>> c0 = Tensor(np.ones([1 * 2, 3, 12]).astype(np.float32))
-        >>> output, (hn, cn) = net(input, h0, c0)
+        >>> output, (hn, cn) = net(input, (h0, c0))
     """

     def __init__(self,
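For reference, the simplified example runs as a standalone script roughly as below; this is a sketch assuming a backend with LSTM support, with shapes taken from the docstring (input is (batch, seq_len, input_size) because batch_first=True; h0 and c0 are (num_directions * num_layers, batch_size, hidden_size)):

import numpy as np
from mindspore import Tensor
import mindspore.nn as nn

# Two stacked unidirectional layers: num_directions * num_layers = 1 * 2 = 2.
net = nn.LSTM(10, 12, 2, has_bias=True, batch_first=True, bidirectional=False)
x = Tensor(np.ones([3, 5, 10]).astype(np.float32))    # (batch, seq_len, input_size)
h0 = Tensor(np.ones([2, 3, 12]).astype(np.float32))   # (2, batch, hidden_size)
c0 = Tensor(np.ones([2, 3, 12]).astype(np.float32))
output, (hn, cn) = net(x, (h0, c0))
print(output.shape)  # expected (3, 5, 12)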
@@ -159,7 +143,7 @@ class LSTMCell(Cell):
     r"""
     LSTM (Long Short-Term Memory) layer.

-    Applies a LSTM layer to the input.
+    Apply LSTM layer to the input.

     There are two pipelines connecting two consecutive cells in a LSTM model; one is cell state pipeline
     and the other is hidden state pipeline. Denote two consecutive time nodes as :math:`t-1` and :math:`t`.
@@ -224,20 +208,7 @@ class LSTMCell(Cell):
         - **state** - reserved

     Examples:
-        >>> class LstmNet(nn.Cell):
-        >>>     def __init__(self, input_size, hidden_size, has_bias, batch_first, bidirectional):
-        >>>         super(LstmNet, self).__init__()
-        >>>         self.lstm = nn.LSTMCell(input_size=input_size,
-        >>>                                 hidden_size=hidden_size,
-        >>>                                 has_bias=has_bias,
-        >>>                                 batch_first=batch_first,
-        >>>                                 bidirectional=bidirectional,
-        >>>                                 dropout=0.0)
-        >>>
-        >>>     def construct(self, inp, h, c, w):
-        >>>         return self.lstm(inp, h, c, w)
-        >>>
-        >>> net = LstmNet(10, 12, has_bias=True, batch_first=True, bidirectional=False)
+        >>> net = nn.LSTMCell(10, 12, has_bias=True, batch_first=True, bidirectional=False)
         >>> input = Tensor(np.ones([3, 5, 10]).astype(np.float32))
         >>> h = Tensor(np.ones([1, 3, 12]).astype(np.float32))
         >>> c = Tensor(np.ones([1, 3, 12]).astype(np.float32))
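Note that, unlike nn.LSTM above, the removed class-based example called the cell as self.lstm(inp, h, c, w): at this version LSTMCell also takes a flattened weight tensor w at call time (its layout follows the weight-size formula in the surrounding docstring; this reading is an assumption based on the removed example). A construction-only sketch:

import numpy as np
from mindspore import Tensor
import mindspore.nn as nn

net = nn.LSTMCell(10, 12, has_bias=True, batch_first=True, bidirectional=False)
x = Tensor(np.ones([3, 5, 10]).astype(np.float32))
h = Tensor(np.ones([1, 3, 12]).astype(np.float32))
c = Tensor(np.ones([1, 3, 12]).astype(np.float32))
# Calling the cell additionally requires the flattened weight tensor:
#     out = net(x, h, c, w)   # w is not constructed in this sketch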
@@ -21,8 +21,7 @@ from mindspore.common.tensor import Tensor
 from mindspore.ops.primitive import constexpr
 from ..cell import Cell
 from ...common import dtype as mstype
-from ..._checkparam import Validator as validator
-from ..._checkparam import Rel
+from ..._checkparam import Rel, Validator as validator

 __all__ = ['ReduceLogSumExp', 'Range', 'LinSpace', 'LGamma', 'MatMul']
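The import reshuffle does not change behaviour; a quick sketch of one of the exports listed here, assuming nn.MatMul defaults to plain matrix multiplication on 2-D inputs:

import numpy as np
from mindspore import Tensor
import mindspore.nn as nn

net = nn.MatMul()
a = Tensor(np.ones([3, 4]).astype(np.float32))
b = Tensor(np.ones([4, 5]).astype(np.float32))
print(net(a, b).shape)  # expected (3, 5)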
@@ -19,11 +19,10 @@ from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
 from mindspore.ops.primitive import constexpr
 import mindspore.context as context
-from mindspore._checkparam import check_bool, check_typename
+from mindspore._checkparam import check_bool, check_typename, check_int_positive
 from mindspore._extends import cell_attr_register
 from mindspore.communication.management import get_group_size, get_rank
 from mindspore.communication import management
-from mindspore._checkparam import check_int_positive
 from mindspore.ops import _selected_ops
 from ..cell import Cell
@@ -15,11 +15,10 @@
 """pooling"""
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
-from mindspore._checkparam import Validator as validator
+from mindspore._checkparam import Rel, Validator as validator
 from mindspore.ops.primitive import constexpr
-from ... import context
+import mindspore.context as context
 from ..cell import Cell
-from ..._checkparam import Rel

 __all__ = ['AvgPool2d', 'MaxPool2d', 'AvgPool1d']
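Behaviour is unchanged here as well; a usage sketch for the pooling exports, assuming the standard NCHW input layout and 'valid'-style windowing for these arguments:

import numpy as np
import mindspore
from mindspore import Tensor
import mindspore.nn as nn

pool = nn.MaxPool2d(kernel_size=3, stride=1)
x = Tensor(np.random.randint(0, 10, [1, 2, 4, 4]), mindspore.float32)
print(pool(x).shape)  # expected (1, 2, 2, 2): (4 - 3) / 1 + 1 = 2 per spatial dim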
@@ -16,7 +16,6 @@
 from functools import partial
 import numpy as np
+from mindspore import nn
 import mindspore.common.dtype as mstype
 from mindspore.ops import operations as P
@@ -24,15 +23,11 @@ from mindspore.ops import functional as F
 from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
 from mindspore.common.tensor import Tensor
-from mindspore._checkparam import check_int_positive, check_bool, twice
-from mindspore._checkparam import Rel
+from mindspore._checkparam import Rel, check_int_positive, check_bool, twice, ParamValidator as validator
 import mindspore.context as context
 from .normalization import BatchNorm2d, BatchNorm1d
 from .activation import get_activation, ReLU, LeakyReLU
 from ..cell import Cell
-from . import conv, basic
-from ..._checkparam import ParamValidator as validator
 from ...ops.operations import _quant_ops as Q

 __all__ = [
@@ -127,17 +122,17 @@ class Conv2dBnAct(Cell):
                  after_fake=True):
         super(Conv2dBnAct, self).__init__()

-        self.conv = conv.Conv2d(in_channels,
-                                out_channels,
-                                kernel_size=kernel_size,
-                                stride=stride,
-                                pad_mode=pad_mode,
-                                padding=padding,
-                                dilation=dilation,
-                                group=group,
-                                has_bias=has_bias,
-                                weight_init=weight_init,
-                                bias_init=bias_init)
+        self.conv = nn.Conv2d(in_channels,
+                              out_channels,
+                              kernel_size=kernel_size,
+                              stride=stride,
+                              pad_mode=pad_mode,
+                              padding=padding,
+                              dilation=dilation,
+                              group=group,
+                              has_bias=has_bias,
+                              weight_init=weight_init,
+                              bias_init=bias_init)
         self.has_bn = validator.check_bool("has_bn", has_bn)
         self.has_act = activation is not None
         self.after_fake = after_fake
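A usage sketch for the rewired layer; the constructor arguments are the ones forwarded in this hunk, and a default pad_mode of 'same' (an assumption) keeps the spatial size:

import numpy as np
import mindspore
from mindspore import Tensor
import mindspore.nn as nn

# Conv2d -> BatchNorm -> activation, as the has_bn / activation flags suggest.
net = nn.Conv2dBnAct(120, 240, 4, has_bn=True, activation='relu')
x = Tensor(np.ones([1, 120, 32, 32]), mindspore.float32)
print(net(x).shape)  # expected (1, 240, 32, 32)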
@@ -200,7 +195,7 @@ class DenseBnAct(Cell):
                  activation=None,
                  after_fake=True):
         super(DenseBnAct, self).__init__()
-        self.dense = basic.Dense(
+        self.dense = nn.Dense(
             in_channels,
             out_channels,
             weight_init,
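The same pattern applies to the Dense variant, assuming nn.DenseBnAct mirrors nn.Dense's (in_channels, out_channels) positional signature seen in this hunk:

import numpy as np
import mindspore
from mindspore import Tensor
import mindspore.nn as nn

net = nn.DenseBnAct(3, 4)  # Dense -> (optional) BatchNorm1d -> activation
x = Tensor(np.random.randint(0, 255, [2, 3]), mindspore.float32)
print(net(x).shape)  # expected (2, 4)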
@@ -1349,11 +1344,6 @@ class QuantBlock(Cell):
     Outputs:
         Tensor of shape :math:`(N, out\_channels)`.

-    Examples:
-        >>> net = nn.Dense(3, 4)
-        >>> input = Tensor(np.random.randint(0, 255, [2, 3]), mindspore.float32)
-        >>> net(input)
     """

     def __init__(self,