From 12e6f32ae19bffdf21f0294c1bb06f2c2c1ded73 Mon Sep 17 00:00:00 2001
From: dinglongwei
Date: Wed, 27 Jan 2021 17:26:56 +0800
Subject: [PATCH] update timedistributed document

---
 mindspore/nn/layer/timedistributed.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/mindspore/nn/layer/timedistributed.py b/mindspore/nn/layer/timedistributed.py
index 2b40934bb1..c62a3d11fc 100644
--- a/mindspore/nn/layer/timedistributed.py
+++ b/mindspore/nn/layer/timedistributed.py
@@ -71,11 +71,8 @@ class TimeDistributed(Cell):
         time_axis(int): The axis of time_step.
         reshape_with_axis(int): The axis which time_axis will be reshaped with. Default: 'None'.
 
-    Raises:
-        TypeError: If cell is not a Cell or Primitive.
-
-    inputs:
-        -**input**(Tensor)-Tensor of shape: math:'(N, T, *)'
+    Inputs:
+        - **input** (Tensor) - Tensor of shape :math:`(N, T, *)`.
 
     Outputs:
-        Tensor of shape: math:'(N, T, *)'
+        Tensor of shape :math:`(N, T, *)`.
@@ -83,10 +83,13 @@ class TimeDistributed(Cell):
     Supported Platforms:
         ``Ascend`` ``GPU`` ``CPU``
 
+    Raises:
+        TypeError: If layer is not a Cell or Primitive.
+
     Examples:
         >>> input = Tensor(np.random.random([32, 10, 3]), mindspore.float32)
         >>> dense = nn.Dense(3, 6)
-        >>> net = TimeDistributed(dense, time_axis=1, reshape_with_axis=0)
+        >>> net = nn.TimeDistributed(dense, time_axis=1, reshape_with_axis=0)
        >>> output = net(input)
         >>> print(output.shape)
         (32, 10, 6)