diff --git a/mindspore/common/tensor.py b/mindspore/common/tensor.py
index cec002234b..85ec361caf 100644
--- a/mindspore/common/tensor.py
+++ b/mindspore/common/tensor.py
@@ -177,6 +177,8 @@ class Tensor(Tensor_):
         return out
 
     def __getitem__(self, index):
+        if isinstance(index, int) and index >= self.shape[0]:
+            raise IndexError("index {} is out of bounds for axis 0 with size {}".format(index, self.shape[0]))
         out = tensor_operator_registry.get('__getitem__')(self, index)
         return out
 
@@ -318,7 +320,7 @@ class Tensor(Tensor_):
 
         Args:
             shape (Tensor): The input tensor. The shape of input tensor must obey
-            the broadcasting rule.
+                the broadcasting rule.
 
         Returns:
             Tensor, has the same dimension as input tensor.
diff --git a/mindspore/nn/layer/basic.py b/mindspore/nn/layer/basic.py
index 47b8df8b29..a129048918 100644
--- a/mindspore/nn/layer/basic.py
+++ b/mindspore/nn/layer/basic.py
@@ -775,7 +775,7 @@ class Tril(Cell):
 
     def construct(self, x, k=0):
         assist = tril(x.shape, self.dtype(x), k)
-        result = self.mul(self.cast(x, mstype.int32), self.cast(assist, mstype.int32))
+        result = self.mul(self.cast(x, mstype.float32), self.cast(assist, mstype.float32))
         return self.cast(result, self.dtype(x))
 
@@ -817,7 +817,7 @@ class Triu(Cell):
 
     def construct(self, x, k=0):
         assist = triu(x.shape, self.dtype(x), k)
-        result = self.mul(self.cast(x, mstype.int32), self.cast(assist, mstype.int32))
+        result = self.mul(self.cast(x, mstype.float32), self.cast(assist, mstype.float32))
         return self.cast(result, self.dtype(x))