
!8708 fix SmoothL1LossGrad beta attr problem.

From: @liangchenghui
Reviewed-by: @kingxian
Signed-off-by: @kingxian
tags/v1.1.0
mindspore-ci-bot committed 5 years ago
commit 5a203d08d0
2 changed files with 6 additions and 3 deletions:
  1. +1 -1  mindspore/ops/operations/_grad_ops.py
  2. +5 -2  mindspore/ops/operations/array_ops.py

+1 -1  mindspore/ops/operations/_grad_ops.py

@@ -1561,7 +1561,7 @@ class SmoothL1LossGrad(PrimitiveWithInfer):

     @prim_attr_register
     def __init__(self, beta=1.0):
-        pass
+        self.add_prim_attr('sigma', beta)

     def infer_shape(self, prediction, target, dloss):
         validator.check('prediction shape', prediction, 'target shape', target, Rel.EQ, self.name)
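For context, a minimal sketch of the effect of this one-line fix, not part of the patch: @prim_attr_register already records the Python-side `beta` argument, and the constructor now additionally registers the same value under the attr name 'sigma', which is presumably the name the backend kernel reads. The snippet assumes MindSpore 1.x internals and the standard Primitive.attrs dictionary.

    # Hypothetical check (assumes MindSpore 1.x internals): after the fix the
    # smoothing threshold is visible under both attr names.
    from mindspore.ops.operations import _grad_ops as G

    grad_op = G.SmoothL1LossGrad(beta=0.5)
    # 'beta' comes from @prim_attr_register; 'sigma' comes from the added line.
    print(grad_op.attrs.get('beta'), grad_op.attrs.get('sigma'))  # expected: 0.5 0.5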


+5 -2  mindspore/ops/operations/array_ops.py

@@ -2159,8 +2159,8 @@ class Slice(PrimitiveWithInfer):

     Inputs:
         - **x** (Tensor): The target tensor.
-        - **begin** (tuple): The beginning of the slice. Only constant value is allowed.
-        - **size** (tuple): The size of the slice. Only constant value is allowed.
+        - **begin** (tuple, list): The beginning of the slice. Only constant value is allowed.
+        - **size** (tuple, list): The size of the slice. Only constant value is allowed.

     Outputs:
         Tensor, the shape is : input `size`, the data type is the same as input `x`.
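Below is a minimal usage sketch, not part of the patch, of what the relaxed signature documents: `begin` and `size` may now be given as a list as well as a tuple. It assumes the mindspore.ops.operations interface of the 1.x releases and eager (PyNative) execution.

    import numpy as np
    import mindspore
    from mindspore import Tensor
    from mindspore.ops import operations as P

    x = Tensor(np.arange(24).reshape(2, 3, 4), mindspore.float32)
    slice_op = P.Slice()
    out_tuple = slice_op(x, (0, 1, 0), (1, 2, 4))  # tuple form, accepted before
    out_list = slice_op(x, [0, 1, 0], [1, 2, 4])   # list form, now documented and validated
    # Both calls should return a tensor of shape (1, 2, 4).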
@@ -2189,10 +2189,13 @@ class Slice(PrimitiveWithInfer):
             return {'shape': None,
                     'dtype': x['dtype'],
                     'value': None}
+        validator.check_value_type("input begin", begin_v, [tuple, list], self.name)
+        validator.check_value_type("input size", size_v, [tuple, list], self.name)
         for key, value in zip(('begin', 'size'), (begin_v, size_v)):
             validator.check(f'len of {key}', len(value),
                             'len x\'s dim', x_shp_len)
         for i in range(x_shp_len):
             validator.check_positive_int(size_v[i], f'input size[{i}]')
             if x_shape[i] < begin_v[i] + size_v[i]:
                 y = begin_v[i] + size_v[i]
                 raise ValueError("For '%s' slice shape can not bigger than orign shape %d, %d." %
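For readers skimming the hunk, here is a plain-Python sketch, an illustration rather than MindSpore code, of the constraints the infer step now enforces on the constant `begin`/`size` inputs (type, length, positivity, and bounds):

    def check_slice_args(x_shape, begin_v, size_v):
        # begin and size must be a tuple or a list (the newly added type check).
        for name, value in (("begin", begin_v), ("size", size_v)):
            if not isinstance(value, (tuple, list)):
                raise TypeError(f"input {name} must be a tuple or a list")
            # Their length must match the rank of x.
            if len(value) != len(x_shape):
                raise ValueError(f"len of {name} must equal len of x's dim")
        for i in range(len(x_shape)):
            # Each size entry must be positive and the slice must stay
            # inside the original shape.
            if size_v[i] <= 0:
                raise ValueError(f"input size[{i}] must be a positive int")
            if x_shape[i] < begin_v[i] + size_v[i]:
                raise ValueError("slice shape can not be bigger than origin shape")

    check_slice_args((2, 3, 4), [0, 1, 0], [1, 2, 4])  # passes: list inputs are fine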

