| @@ -43,7 +43,7 @@ class Invert(Bijector): | |||||
| ... return self.invert.forward(x_) | ... return self.invert.forward(x_) | ||||
| >>> forward = Net() | >>> forward = Net() | ||||
| >>> x = np.array([2.0, 3.0, 4.0, 5.0]).astype(np.float32) | >>> x = np.array([2.0, 3.0, 4.0, 5.0]).astype(np.float32) | ||||
| >>> ans = forward(Tensor(x, dtype=dtype.float32)) | |||||
| >>> ans = forward(Tensor(x, dtype=mindspore.float32)) | |||||
| """ | """ | ||||
| def __init__(self, | def __init__(self, | ||||
| @@ -52,13 +52,13 @@ class Gumbel(TransformedDistribution): | |||||
| >>> class Prob(nn.Cell): | >>> class Prob(nn.Cell): | ||||
| ... def __init__(self): | ... def __init__(self): | ||||
| ... super(Prob, self).__init__() | ... super(Prob, self).__init__() | ||||
| ... self.gum = msd.Gumbel(np.array([0.0]), np.array([[1.0], [2.0]]), dtype=dtype.float32) | |||||
| ... self.gum = msd.Gumbel(np.array([0.0]), np.array([[1.0], [2.0]]), dtype=mindspore.float32) | |||||
| ... | ... | ||||
| ... def construct(self, x_): | ... def construct(self, x_): | ||||
| ... return self.gum.prob(x_) | ... return self.gum.prob(x_) | ||||
| >>> value = np.array([1.0, 2.0]).astype(np.float32) | >>> value = np.array([1.0, 2.0]).astype(np.float32) | ||||
| >>> pdf = Prob() | >>> pdf = Prob() | ||||
| >>> output = pdf(Tensor(value, dtype=dtype.float32)) | |||||
| >>> output = pdf(Tensor(value, dtype=mindspore.float32)) | |||||
| """ | """ | ||||
| def __init__(self, | def __init__(self, | ||||
| @@ -48,14 +48,14 @@ class LogNormal(msd.TransformedDistribution): | |||||
| >>> import mindspore.nn as nn | >>> import mindspore.nn as nn | ||||
| >>> import mindspore.nn.probability.distribution as msd | >>> import mindspore.nn.probability.distribution as msd | ||||
| >>> from mindspore import Tensor | >>> from mindspore import Tensor | ||||
| ... class Prob(nn.Cell): | |||||
| ... def __init__(self): | |||||
| ... super(Prob, self).__init__() | |||||
| ... self.ln = msd.LogNormal(np.array([0.3]), np.array([[0.2], [0.4]]), dtype=dtype.float32) | |||||
| ... def construct(self, x_): | |||||
| ... return self.ln.prob(x_) | |||||
| >>> class Prob(nn.Cell): | |||||
| ... def __init__(self): | |||||
| ... super(Prob, self).__init__() | |||||
| ... self.ln = msd.LogNormal(np.array([0.3]), np.array([[0.2], [0.4]]), dtype=mindspore.float32) | |||||
| ... def construct(self, x_): | |||||
| ... return self.ln.prob(x_) | |||||
| >>> pdf = Prob() | >>> pdf = Prob() | ||||
| >>> output = pdf(Tensor([1.0, 2.0], dtype=dtype.float32)) | |||||
| >>> output = pdf(Tensor([1.0, 2.0], dtype=mindspore.float32)) | |||||
| """ | """ | ||||
| def __init__(self, | def __init__(self, | ||||
| @@ -35,7 +35,7 @@ class Poisson(Distribution): | |||||
| name (str): The name of the distribution. Default: 'Poisson'. | name (str): The name of the distribution. Default: 'Poisson'. | ||||
| Supported Platforms: | Supported Platforms: | ||||
| ``Ascend`` ``GPU`` | |||||
| ``Ascend`` | |||||
| Note: | Note: | ||||
| `rate` must be strictly greater than 0. | `rate` must be strictly greater than 0. | ||||
| @@ -82,14 +82,14 @@ class Poisson(Distribution): | |||||
| >>> # Examples of `mean`, `sd`, `mode`, and `var` are similar. | >>> # Examples of `mean`, `sd`, `mode`, and `var` are similar. | ||||
| >>> ans = p1.mean() # return 2 | >>> ans = p1.mean() # return 2 | ||||
| >>> print(ans.shape) | >>> print(ans.shape) | ||||
| () | |||||
| (1,) | |||||
| >>> ans = p1.mean(rate_b) # return rate_b | >>> ans = p1.mean(rate_b) # return rate_b | ||||
| >>> print(ans.shape) | >>> print(ans.shape) | ||||
| (3,) | (3,) | ||||
| >>> # `rate` must be passed in during function calls. | >>> # `rate` must be passed in during function calls. | ||||
| >>> ans = p2.mean(rate_a) | >>> ans = p2.mean(rate_a) | ||||
| >>> print(ans.shape) | >>> print(ans.shape) | ||||
| () | |||||
| (1,) | |||||
| >>> # Examples of `sample`. | >>> # Examples of `sample`. | ||||
| >>> # Args: | >>> # Args: | ||||
| >>> # shape (tuple): the shape of the sample. Default: () | >>> # shape (tuple): the shape of the sample. Default: () | ||||
| @@ -58,7 +58,7 @@ class TransformedDistribution(Distribution): | |||||
| >>> import mindspore.nn.probability.bijector as msb | >>> import mindspore.nn.probability.bijector as msb | ||||
| >>> from mindspore import Tensor | >>> from mindspore import Tensor | ||||
| >>> class Net(nn.Cell): | >>> class Net(nn.Cell): | ||||
| ... def __init__(self, shape, dtype=dtype.float32, seed=0, name='transformed_distribution'): | |||||
| ... def __init__(self, shape, dtype=mindspore.float32, seed=0, name='transformed_distribution'): | |||||
| ... super(Net, self).__init__() | ... super(Net, self).__init__() | ||||
| ... # create TransformedDistribution distribution | ... # create TransformedDistribution distribution | ||||
| ... self.exp = msb.Exp() | ... self.exp = msb.Exp() | ||||
| @@ -73,7 +73,7 @@ class TransformedDistribution(Distribution): | |||||
| >>> shape = (2, 3) | >>> shape = (2, 3) | ||||
| >>> net = Net(shape=shape, name="LogNormal") | >>> net = Net(shape=shape, name="LogNormal") | ||||
| >>> x = np.array([2.0, 3.0, 4.0, 5.0]).astype(np.float32) | >>> x = np.array([2.0, 3.0, 4.0, 5.0]).astype(np.float32) | ||||
| >>> tx = Tensor(x, dtype=dtype.float32) | |||||
| >>> tx = Tensor(x, dtype=mindspore.float32) | |||||
| >>> cdf, sample = net(tx) | >>> cdf, sample = net(tx) | ||||
| """ | """ | ||||