Browse Source

fix some api comments.

tags/v1.2.0-rc1
zhangyi 5 years ago
parent
commit
5b5cfee0b1
10 changed files with 13 additions and 12 deletions
  1. +1
    -1
      mindspore/common/initializer.py
  2. +2
    -2
      mindspore/common/tensor.py
  3. +1
    -1
      mindspore/dataset/transforms/c_transforms.py
  4. +1
    -1
      mindspore/nn/layer/conv.py
  5. +3
    -3
      mindspore/nn/layer/embedding.py
  6. +2
    -0
      mindspore/nn/optim/momentum.py
  7. +2
    -0
      mindspore/nn/optim/sgd.py
  8. +1
    -0
      mindspore/nn/probability/bijector/softplus.py
  9. +0
    -2
      mindspore/nn/probability/distribution/bernoulli.py
  10. +0
    -2
      mindspore/nn/probability/distribution/exponential.py

+ 1
- 1
mindspore/common/initializer.py View File

@@ -259,7 +259,7 @@ class HeUniform(Initializer):
Initialize the array with He kaiming uniform algorithm, and from a uniform distribution collect samples within
U[-boundary, boundary] The boundary is defined as :

where :math:`boundary = \sqrt{\frac{6}{(1 + a^2) \times \text{fan\_in}}}`.
where :math:`boundary = \sqrt{\frac{6}{(1 + a^2) \times \text{fan_in}}}`.

Args:
negative_slope (int, float, bool): Default: 0, used when nonlinearity is 'leaky_relu'.


+ 2
- 2
mindspore/common/tensor.py View File

@@ -391,8 +391,8 @@ class Tensor(Tensor_):

Args:
slice_index (int): Slice index of a parameter's slices.
It is used when initialize a slice of a parameter, it guarantees that devices
using the same slice can generate the same tensor.
It is used when initialize a slice of a parameter, it guarantees that devices
using the same slice can generate the same tensor.
shape (list[int]): Shape of the slice, it is used when initialize a slice of the parameter.
opt_shard_group(str): Optimizer shard group which is used in auto or semi auto parallel mode
to get one shard of a parameter's slice.


+ 1
- 1
mindspore/dataset/transforms/c_transforms.py View File

@@ -136,7 +136,7 @@ class Slice(cde.SliceOp):
1. :py:obj:`int`: Slice this index only along the first dimension. Negative index is supported.
2. :py:obj:`list(int)`: Slice these indices along the first dimension. Negative indices are supported.
3. :py:obj:`slice`: Slice the generated indices from the slice object along the first dimension.
Similar to `start:stop:step`.
Similar to `start:stop:step`.
4. :py:obj:`None`: Slice the whole dimension. Similar to `:` in Python indexing.
5. :py:obj:`Ellipsis`: Slice the whole dimension. Similar to `:` in Python indexing.



+ 1
- 1
mindspore/nn/layer/conv.py View File

@@ -191,7 +191,7 @@ class Conv2d(_Conv):

Inputs:
- **input** (Tensor) - Tensor of shape :math:`(N, C_{in}, H_{in}, W_{in})` \
or `(N, H_{in}, W_{in}, C_{in})`.
or `(N, H_{in}, W_{in}, C_{in})`.

Outputs:
Tensor of shape :math:`(N, C_{out}, H_{out}, W_{out})` or `(N, H_{out}, W_{out}, C_{out})`.


+ 3
- 3
mindspore/nn/layer/embedding.py View File

@@ -365,13 +365,13 @@ class MultiFieldEmbeddingLookup(EmbeddingLookup):
operator (string): The pooling method for the features in one field. Support 'SUM', 'MEAN' and 'MAX'

Inputs:
- **input_indices** (Tensor) - The shape of tensor is :math:`(batch_size, seq_length)`.
- **input_indices** (Tensor) - The shape of tensor is :math:`(batch\_size, seq\_length)`.
Specifies the indices of elements of the original Tensor. Input_indices must be a 2d tensor in
this interface. Type is Int32, Int64.
- **input_values** (Tensor) - The shape of tensor is :math:`(batch_size, seq_length)`.
- **input_values** (Tensor) - The shape of tensor is :math:`(batch\_size, seq\_length)`.
Specifies the weights of elements of the input_indices. The lookout vector will multiply with
the input_values. Type is Float32.
- **field_ids** (Tensor) - The shape of tensor is :math:`(batch_size, seq_length)`.
- **field_ids** (Tensor) - The shape of tensor is :math:`(batch\_size, seq\_length)`.
Specifies the field id of elements of the input_indices. Type is Int32.

Outputs:


+ 2
- 0
mindspore/nn/optim/momentum.py View File

@@ -56,10 +56,12 @@ class Momentum(Optimizer):
v_{t} = v_{t-1} \ast u + gradients

If use_nesterov is True:

.. math::
p_{t} = p_{t-1} - (grad \ast lr + v_{t} \ast u \ast lr)

If use_nesterov is False:

.. math::
p_{t} = p_{t-1} - lr \ast v_{t}



+ 2
- 0
mindspore/nn/optim/sgd.py View File

@@ -50,10 +50,12 @@ class SGD(Optimizer):
v_{t+1} = u \ast v_{t} + gradient \ast (1-dampening)

If nesterov is True:

.. math::
p_{t+1} = p_{t} - lr \ast (gradient + u \ast v_{t+1})

If nesterov is False:

.. math::
p_{t+1} = p_{t} - lr \ast v_{t+1}



+ 1
- 0
mindspore/nn/probability/bijector/softplus.py View File

@@ -27,6 +27,7 @@ class Softplus(Bijector):

.. math::
Y = \frac{\log(1 + e ^ {kX})}{k}

where k is the sharpness factor.

Args:


+ 0
- 2
mindspore/nn/probability/distribution/bernoulli.py View File

@@ -49,12 +49,10 @@ class Bernoulli(Distribution):
>>> # A Bernoulli distribution can be initialized without arguments.
>>> # In this case, `probs` must be passed in through arguments during function calls.
>>> b2 = msd.Bernoulli(dtype=mindspore.int32)

>>> # Here are some tensors used below for testing
>>> value = Tensor([1, 0, 1], dtype=mindspore.int32)
>>> probs_a = Tensor([0.6], dtype=mindspore.float32)
>>> probs_b = Tensor([0.2, 0.3, 0.4], dtype=mindspore.float32)

>>> # Private interfaces of probability functions corresponding to public interfaces, including
>>> # `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`, are the same as follows.
>>> # Args:


+ 0
- 2
mindspore/nn/probability/distribution/exponential.py View File

@@ -51,12 +51,10 @@ class Exponential(Distribution):
>>> # An Exponential distribution can be initialized without arguments.
>>> # In this case, `rate` must be passed in through `args` during function calls.
>>> e2 = msd.Exponential(dtype=mindspore.float32)

>>> # Here are some tensors used below for testing
>>> value = Tensor([1, 2, 3], dtype=mindspore.float32)
>>> rate_a = Tensor([0.6], dtype=mindspore.float32)
>>> rate_b = Tensor([0.2, 0.5, 0.4], dtype=mindspore.float32)

>>> # Private interfaces of probability functions corresponding to public interfaces, including
>>> # `prob`, `log_prob`, `cdf`, `log_cdf`, `survival_function`, and `log_survival`, are the same as follows.
>>> # Args:


Loading…
Cancel
Save