diff --git a/mindspore/ops/operations/_grad_ops.py b/mindspore/ops/operations/_grad_ops.py
index 05c18afab9..1c3bceaec4 100644
--- a/mindspore/ops/operations/_grad_ops.py
+++ b/mindspore/ops/operations/_grad_ops.py
@@ -1561,7 +1561,7 @@ class SmoothL1LossGrad(PrimitiveWithInfer):
 
     @prim_attr_register
     def __init__(self, beta=1.0):
-        pass
+        self.add_prim_attr('sigma', beta)
 
     def infer_shape(self, prediction, target, dloss):
         validator.check('prediction shape', prediction, 'target shape', target, Rel.EQ, self.name)
diff --git a/mindspore/ops/operations/array_ops.py b/mindspore/ops/operations/array_ops.py
index d0e1111ac7..520f35186c 100644
--- a/mindspore/ops/operations/array_ops.py
+++ b/mindspore/ops/operations/array_ops.py
@@ -2147,8 +2147,8 @@ class Slice(PrimitiveWithInfer):
 
     Inputs:
         - **x** (Tensor): The target tensor.
-        - **begin** (tuple): The beginning of the slice. Only constant value is allowed.
-        - **size** (tuple): The size of the slice. Only constant value is allowed.
+        - **begin** (tuple, list): The beginning of the slice. Only constant value is allowed.
+        - **size** (tuple, list): The size of the slice. Only constant value is allowed.
 
     Outputs:
         Tensor, the shape is : input `size`, the data type is the same as input `x`.
@@ -2177,10 +2177,13 @@ class Slice(PrimitiveWithInfer):
             return {'shape': None,
                     'dtype': x['dtype'],
                     'value': None}
+        validator.check_value_type("input begin", begin_v, [tuple, list], self.name)
+        validator.check_value_type("input size", size_v, [tuple, list], self.name)
         for key, value in zip(('begin', 'size'), (begin_v, size_v)):
             validator.check(f'len of {key}', len(value), 'len x\'s dim', x_shp_len)
         for i in range(x_shp_len):
+            validator.check_positive_int(size_v[i], f'input size[{i}]')
             if x_shape[i] < begin_v[i] + size_v[i]:
                 y = begin_v[i] + size_v[i]
                 raise ValueError("For '%s' slice shape can not bigger than orign shape %d, %d." %
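
To illustrate what the relaxed validation in the Slice hunk permits, here is a minimal usage sketch, assuming a MindSpore build that includes these changes and the Slice primitive as documented in array_ops.py; the tensor values and shapes are made up for illustration only.

    # Minimal sketch (assumption: MindSpore build containing the validator changes above;
    # values are illustrative). 'begin' and 'size' may now be lists as well as tuples,
    # and each size entry must be a positive int or check_positive_int raises.
    import numpy as np
    from mindspore import Tensor
    from mindspore import dtype as mstype
    from mindspore.ops import operations as P

    x = Tensor(np.array([[1, 2, 3], [4, 5, 6]]), mstype.float32)
    slice_op = P.Slice()

    # Slice a 2x2 window starting at row 0, column 1; lists are accepted here
    # where previously only tuples passed validation.
    out = slice_op(x, [0, 1], [2, 2])
    print(out)  # expected values: [[2, 3], [5, 6]]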