Browse Source

!3131 change shape and dtype of tensor from interface to attr

Merge pull request !3131 from zhangbuxue/change_shape_and_dtype_of_tensor_from_interface_to_attr
tags/v0.6.0-beta
mindspore-ci-bot Gitee 5 years ago
parent
commit
e16dd2a6c4
4 changed files with 8 additions and 8 deletions
  1. +1
    -1
      mindspore/nn/graph_kernels/graph_kernels.py
  2. +3
    -3
      mindspore/nn/layer/quant.py
  3. +3
    -3
      model_zoo/gat/src/gat.py
  4. +1
    -1
      tests/st/pynative/test_ops.py

+ 1
- 1
mindspore/nn/graph_kernels/graph_kernels.py View File

@@ -1020,7 +1020,7 @@ class LayerNorm(Cell):

Examples:
>>> x = Tensor(np.ones([20, 5, 10, 10]), mindspore.float32)
>>> shape1 = x.shape()[1:]
>>> shape1 = x.shape[1:]
>>> m = G.LayerNorm(shape1, begin_norm_axis=1, begin_params_axis=1)
>>> m(x)
"""


+ 3
- 3
mindspore/nn/layer/quant.py View File

@@ -746,8 +746,8 @@ class DenseQuant(Cell):
self.has_bias = check_bool(has_bias)

if isinstance(weight_init, Tensor):
if weight_init.dim() != 2 or weight_init.shape()[0] != out_channels or \
weight_init.shape()[1] != in_channels:
if weight_init.dim() != 2 or weight_init.shape[0] != out_channels or \
weight_init.shape[1] != in_channels:
raise ValueError("weight_init shape error")

self.weight = Parameter(initializer(
@@ -755,7 +755,7 @@ class DenseQuant(Cell):

if self.has_bias:
if isinstance(bias_init, Tensor):
if bias_init.dim() != 1 or bias_init.shape()[0] != out_channels:
if bias_init.dim() != 1 or bias_init.shape[0] != out_channels:
raise ValueError("bias_init shape error")

self.bias = Parameter(initializer(


+ 3
- 3
model_zoo/gat/src/gat.py View File

@@ -77,15 +77,15 @@ class GNNFeatureTransform(nn.Cell):
self.has_bias = check_bool(has_bias)

if isinstance(weight_init, Tensor):
if weight_init.dim() != 2 or weight_init.shape()[0] != out_channels or \
weight_init.shape()[1] != in_channels:
if weight_init.dim() != 2 or weight_init.shape[0] != out_channels or \
weight_init.shape[1] != in_channels:
raise ValueError("weight_init shape error")

self.weight = Parameter(initializer(weight_init, [out_channels, in_channels]), name="weight")

if self.has_bias:
if isinstance(bias_init, Tensor):
if bias_init.dim() != 1 or bias_init.shape()[0] != out_channels:
if bias_init.dim() != 1 or bias_init.shape[0] != out_channels:
raise ValueError("bias_init shape error")

self.bias = Parameter(initializer(bias_init, [out_channels]), name="bias")


+ 1
- 1
tests/st/pynative/test_ops.py View File

@@ -28,4 +28,4 @@ def test_cast():
type_dst = ms.float32
cast = P.Cast()
result = cast(input_x, type_dst)
assert result.dtype() == type_dst
assert result.dtype == type_dst

Loading…
Cancel
Save