
!7077 code format for nn.layer

Merge pull request !7077 from chenzhongming/zomi_master
tags/v1.1.0
mindspore-ci-bot · 5 years ago
commit c8d8bef9e6
10 changed files with 32 additions and 79 deletions
  1. mindspore/nn/layer/activation.py (+2 -2)
  2. mindspore/nn/layer/basic.py (+2 -3)
  3. mindspore/nn/layer/conv.py (+1 -3)
  4. mindspore/nn/layer/embedding.py (+2 -3)
  5. mindspore/nn/layer/image.py (+1 -2)
  6. mindspore/nn/layer/lstm.py (+7 -36)
  7. mindspore/nn/layer/math.py (+1 -2)
  8. mindspore/nn/layer/normalization.py (+1 -2)
  9. mindspore/nn/layer/pooling.py (+2 -3)
  10. mindspore/nn/layer/quant.py (+13 -23)
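
Almost every hunk below is the same cleanup: duplicate or package-relative imports of mindspore._checkparam are collapsed into a single absolute import. A minimal sketch of the pattern, with names taken from the diffs below:

    # Before: two imports of the same helper module, one of them package-relative.
    from mindspore._checkparam import Validator as validator
    from ..._checkparam import Rel

    # After: one absolute import that covers both names.
    from mindspore._checkparam import Rel, Validator as validator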

mindspore/nn/layer/activation.py (+2 -2)

@@ -16,13 +16,13 @@
 import numpy as np
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
+from mindspore.ops import _selected_ops
 from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
 from mindspore.common.tensor import Tensor
 from mindspore._extends import cell_attr_register
-from mindspore.ops import _selected_ops
+from mindspore._checkparam import Validator as validator
 from ..cell import Cell
-from ..._checkparam import Validator as validator
 
 
 __all__ = ['Softmax',


mindspore/nn/layer/basic.py (+2 -3)

@@ -20,7 +20,6 @@ import mindspore.common.dtype as mstype
 from mindspore.common.seed import get_seed
 from mindspore.common.tensor import Tensor
 from mindspore.common.initializer import initializer
-from mindspore._checkparam import check_int_positive, check_bool
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore.ops.functional import identity
@@ -28,12 +27,12 @@ from mindspore.ops.operations import _inner_ops as inner
 from mindspore.ops.primitive import constexpr
 from mindspore.common.parameter import Parameter
 from mindspore._extends import cell_attr_register
+from mindspore._checkparam import Rel, Validator as validator, check_int_positive, check_bool
 from mindspore.common.api import ms_function
 from mindspore import context
 from ..cell import Cell
 from .activation import get_activation
-from ..._checkparam import Validator as validator
-from ..._checkparam import Rel
 
 
 __all__ = ['Dropout', 'Flatten', 'Dense', 'ClipByNorm', 'Norm', 'OneHot', 'Pad', 'Unfold',
            'MatrixDiag', 'MatrixDiagPart', 'MatrixSetDiag']


mindspore/nn/layer/conv.py (+1 -3)

@@ -13,7 +13,6 @@
 # limitations under the License.
 # ============================================================================
 """conv"""
-
 import numpy as np
 from mindspore import log as logger
 from mindspore import context
@@ -23,8 +22,7 @@ from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer, Initializer
 from mindspore.common.tensor import Tensor
 from mindspore._checkparam import ParamValidator as validator, Rel
-from mindspore._checkparam import Validator
-from mindspore._checkparam import check_bool, twice, check_int_positive
+from mindspore._checkparam import check_bool, twice, check_int_positive, Validator
 from mindspore._extends import cell_attr_register
 from ..cell import Cell


mindspore/nn/layer/embedding.py (+2 -3)

@@ -18,12 +18,11 @@ from mindspore.common.tensor import Tensor
 from mindspore.ops import operations as P
 from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
-from mindspore._checkparam import Validator
 from mindspore.communication.management import get_group_size
 from mindspore.context import ParallelMode
 from mindspore.parallel._utils import _get_parallel_mode
+from mindspore._checkparam import Rel, Validator as validator
 from ..cell import Cell
-from ..._checkparam import Validator as validator, Rel
 
 __all__ = ['Embedding', 'EmbeddingLookup']
@@ -171,7 +170,7 @@ class EmbeddingLookup(Cell):
         if not isinstance(manual_shapes, tuple):
             raise TypeError("manual_shapes type must be tuple(int) cannot be {}!".format(type(manual_shapes)))
         for dim in manual_shapes:
-            Validator.check_integer('manul shape dim', dim, 0, Rel.GT, self.cls_name)
+            validator.check_integer('manul shape dim', dim, 0, Rel.GT, self.cls_name)
         self.gatherv2.add_prim_attr("manual_split", manual_shapes)
         self.embeddinglookup.add_prim_attr("manual_split", manual_shapes)
         self.gatherv2.shard(((get_group_size(), 1), (1, get_group_size())))


mindspore/nn/layer/image.py (+1 -2)

@@ -20,8 +20,7 @@ from mindspore.common.tensor import Tensor
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore.ops.primitive import constexpr
-from mindspore._checkparam import Validator as validator
-from mindspore._checkparam import Rel
+from mindspore._checkparam import Rel, Validator as validator
 from .conv import Conv2d
 from .container import CellList
 from .pooling import AvgPool2d


mindspore/nn/layer/lstm.py (+7 -36)

@@ -14,16 +14,14 @@
 # ============================================================================
 """lstm"""
 import math
 
 import numpy as np
-
-from mindspore._checkparam import Validator as validator
+from mindspore._checkparam import Rel, Validator as validator
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 from mindspore.common.tensor import Tensor
 from mindspore.nn.cell import Cell
 from mindspore.ops import operations as P
-from ..._checkparam import Rel
 
 __all__ = ['LSTM', 'LSTMCell']
@@ -32,7 +30,7 @@ class LSTM(Cell):
     r"""
     LSTM (Long Short-Term Memory) layer.
 
-    Applies a LSTM to the input.
+    Apply LSTM layer to the input.
 
     There are two pipelines connecting two consecutive cells in a LSTM model; one is cell state pipeline
     and the other is hidden state pipeline. Denote two consecutive time nodes as :math:`t-1` and :math:`t`.
@@ -88,25 +86,11 @@
           (num_directions * `num_layers`, batch_size, `hidden_size`).
 
     Examples:
-        >>> class LstmNet(nn.Cell):
-        >>>     def __init__(self, input_size, hidden_size, num_layers, has_bias, batch_first, bidirectional):
-        >>>         super(LstmNet, self).__init__()
-        >>>         self.lstm = nn.LSTM(input_size=input_size,
-        >>>                             hidden_size=hidden_size,
-        >>>                             num_layers=num_layers,
-        >>>                             has_bias=has_bias,
-        >>>                             batch_first=batch_first,
-        >>>                             bidirectional=bidirectional,
-        >>>                             dropout=0.0)
-        >>>
-        >>>     def construct(self, inp, h0, c0):
-        >>>         return self.lstm(inp, (h0, c0))
-        >>>
-        >>> net = LstmNet(10, 12, 2, has_bias=True, batch_first=True, bidirectional=False)
+        >>> net = nn.LSTM(10, 12, 2, has_bias=True, batch_first=True, bidirectional=False)
         >>> input = Tensor(np.ones([3, 5, 10]).astype(np.float32))
        >>> h0 = Tensor(np.ones([1 * 2, 3, 12]).astype(np.float32))
        >>> c0 = Tensor(np.ones([1 * 2, 3, 12]).astype(np.float32))
-        >>> output, (hn, cn) = net(input, h0, c0)
+        >>> output, (hn, cn) = net(input, (h0, c0))
     """
 
     def __init__(self,
@@ -159,7 +143,7 @@ class LSTMCell(Cell):
     r"""
     LSTM (Long Short-Term Memory) layer.
 
-    Applies a LSTM layer to the input.
+    Apply LSTM layer to the input.
 
     There are two pipelines connecting two consecutive cells in a LSTM model; one is cell state pipeline
     and the other is hidden state pipeline. Denote two consecutive time nodes as :math:`t-1` and :math:`t`.
@@ -224,20 +208,7 @@
         - **state** - reserved
 
     Examples:
-        >>> class LstmNet(nn.Cell):
-        >>>     def __init__(self, input_size, hidden_size, has_bias, batch_first, bidirectional):
-        >>>         super(LstmNet, self).__init__()
-        >>>         self.lstm = nn.LSTMCell(input_size=input_size,
-        >>>                                 hidden_size=hidden_size,
-        >>>                                 has_bias=has_bias,
-        >>>                                 batch_first=batch_first,
-        >>>                                 bidirectional=bidirectional,
-        >>>                                 dropout=0.0)
-        >>>
-        >>>     def construct(self, inp, h, c, w):
-        >>>         return self.lstm(inp, h, c, w)
-        >>>
-        >>> net = LstmNet(10, 12, has_bias=True, batch_first=True, bidirectional=False)
+        >>> net = nn.LSTMCell(10, 12, has_bias=True, batch_first=True, bidirectional=False)
         >>> input = Tensor(np.ones([3, 5, 10]).astype(np.float32))
         >>> h = Tensor(np.ones([1, 3, 12]).astype(np.float32))
         >>> c = Tensor(np.ones([1, 3, 12]).astype(np.float32))
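
The simplified docstring example above is nearly self-contained. A standalone version for reference, assuming a MindSpore build with LSTM support (shapes follow the docstring: two stacked unidirectional layers, so num_layers * num_directions = 2):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    net = nn.LSTM(10, 12, 2, has_bias=True, batch_first=True, bidirectional=False)
    x = Tensor(np.ones([3, 5, 10]).astype(np.float32))   # (batch, seq_len, input_size)
    h0 = Tensor(np.ones([2, 3, 12]).astype(np.float32))  # (num_layers * num_directions, batch, hidden_size)
    c0 = Tensor(np.ones([2, 3, 12]).astype(np.float32))
    output, (hn, cn) = net(x, (h0, c0))                  # hn, cn keep the shapes of h0, c0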


mindspore/nn/layer/math.py (+1 -2)

@@ -21,8 +21,7 @@ from mindspore.common.tensor import Tensor
 from mindspore.ops.primitive import constexpr
 from ..cell import Cell
 from ...common import dtype as mstype
-from ..._checkparam import Validator as validator
-from ..._checkparam import Rel
+from ..._checkparam import Rel, Validator as validator
 
 
 __all__ = ['ReduceLogSumExp', 'Range', 'LinSpace', 'LGamma', 'MatMul']


mindspore/nn/layer/normalization.py (+1 -2)

@@ -19,11 +19,10 @@ from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
 from mindspore.ops.primitive import constexpr
 import mindspore.context as context
-from mindspore._checkparam import check_bool, check_typename
+from mindspore._checkparam import check_bool, check_typename, check_int_positive
 from mindspore._extends import cell_attr_register
 from mindspore.communication.management import get_group_size, get_rank
 from mindspore.communication import management
-from mindspore._checkparam import check_int_positive
 from mindspore.ops import _selected_ops
 from ..cell import Cell


mindspore/nn/layer/pooling.py (+2 -3)

@@ -15,11 +15,10 @@
 """pooling"""
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
-from mindspore._checkparam import Validator as validator
+from mindspore._checkparam import Rel, Validator as validator
 from mindspore.ops.primitive import constexpr
-from ... import context
+import mindspore.context as context
 from ..cell import Cell
-from ..._checkparam import Rel
 
 __all__ = ['AvgPool2d', 'MaxPool2d', 'AvgPool1d']



mindspore/nn/layer/quant.py (+13 -23)

@@ -16,7 +16,6 @@
 
 from functools import partial
 import numpy as np
-
 from mindspore import nn
 import mindspore.common.dtype as mstype
 from mindspore.ops import operations as P
@@ -24,15 +23,11 @@ from mindspore.ops import functional as F
 from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
 from mindspore.common.tensor import Tensor
-from mindspore._checkparam import check_int_positive, check_bool, twice
-from mindspore._checkparam import Rel
+from mindspore._checkparam import Rel, check_int_positive, check_bool, twice, ParamValidator as validator
 import mindspore.context as context
-
 from .normalization import BatchNorm2d, BatchNorm1d
 from .activation import get_activation, ReLU, LeakyReLU
 from ..cell import Cell
 from . import conv, basic
-from ..._checkparam import ParamValidator as validator
 from ...ops.operations import _quant_ops as Q
 
 __all__ = [
@@ -127,17 +122,17 @@ class Conv2dBnAct(Cell):
                  after_fake=True):
         super(Conv2dBnAct, self).__init__()
 
-        self.conv = conv.Conv2d(in_channels,
-                                out_channels,
-                                kernel_size=kernel_size,
-                                stride=stride,
-                                pad_mode=pad_mode,
-                                padding=padding,
-                                dilation=dilation,
-                                group=group,
-                                has_bias=has_bias,
-                                weight_init=weight_init,
-                                bias_init=bias_init)
+        self.conv = nn.Conv2d(in_channels,
+                              out_channels,
+                              kernel_size=kernel_size,
+                              stride=stride,
+                              pad_mode=pad_mode,
+                              padding=padding,
+                              dilation=dilation,
+                              group=group,
+                              has_bias=has_bias,
+                              weight_init=weight_init,
+                              bias_init=bias_init)
         self.has_bn = validator.check_bool("has_bn", has_bn)
         self.has_act = activation is not None
         self.after_fake = after_fake
@@ -200,7 +195,7 @@ class DenseBnAct(Cell):
                  activation=None,
                  after_fake=True):
         super(DenseBnAct, self).__init__()
-        self.dense = basic.Dense(
+        self.dense = nn.Dense(
             in_channels,
             out_channels,
             weight_init,
@@ -1349,11 +1344,6 @@ class QuantBlock(Cell):
 
     Outputs:
         Tensor of shape :math:`(N, out\_channels)`.
-
-    Examples:
-        >>> net = nn.Dense(3, 4)
-        >>> input = Tensor(np.random.randint(0, 255, [2, 3]), mindspore.float32)
-        >>> net(input)
     """
 
     def __init__(self,
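
For orientation, a minimal usage sketch of the Conv2dBnAct wrapper touched above (argument names are the ones in the hunk; the 'relu' activation string and the output spatial size assume the defaults of nn.Conv2d, so treat this as illustrative rather than exact):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # Conv2d -> (optional) BatchNorm2d -> (optional) activation,
    # which is the composition Conv2dBnAct wires together.
    block = nn.Conv2dBnAct(3, 16, kernel_size=3, has_bn=True, activation='relu')
    x = Tensor(np.ones([1, 3, 32, 32]).astype(np.float32))  # NCHW input
    y = block(x)  # (1, 16, 32, 32) under the default 'same' pad_mode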

