Browse Source

Fix SmoothL1LossGrad beta attr problem.

tags/v1.1.0
liangchenghui 5 years ago
parent
commit
d45bbb8f87
2 changed files with 6 additions and 3 deletions
  1. +1
    -1
      mindspore/ops/operations/_grad_ops.py
  2. +5
    -2
      mindspore/ops/operations/array_ops.py

+ 1
- 1
mindspore/ops/operations/_grad_ops.py View File

@@ -1561,7 +1561,7 @@ class SmoothL1LossGrad(PrimitiveWithInfer):

@prim_attr_register
def __init__(self, beta=1.0):
    """Initialize SmoothL1LossGrad.

    Args:
        beta (float): Smoothing threshold of the SmoothL1 loss. Default: 1.0.

    The value is re-registered under the attr name ``'sigma'``.
    NOTE(review): presumably the backend kernel looks the attr up as
    'sigma' rather than 'beta' — confirm against the kernel registration.
    """
    # The redundant `pass` from the pre-fix body is removed; the attr
    # registration is the whole initialization.
    self.add_prim_attr('sigma', beta)

def infer_shape(self, prediction, target, dloss):
validator.check('prediction shape', prediction, 'target shape', target, Rel.EQ, self.name)


+ 5
- 2
mindspore/ops/operations/array_ops.py View File

@@ -2147,8 +2147,8 @@ class Slice(PrimitiveWithInfer):

Inputs:
- **x** (Tensor): The target tensor.
- **begin** (tuple): The beginning of the slice. Only constant value is allowed.
- **size** (tuple): The size of the slice. Only constant value is allowed.
- **begin** (tuple, list): The beginning of the slice. Only constant value is allowed.
- **size** (tuple, list): The size of the slice. Only constant value is allowed.

Outputs:
Tensor, the shape is : input `size`, the data type is the same as input `x`.
@@ -2177,10 +2177,13 @@ class Slice(PrimitiveWithInfer):
return {'shape': None,
'dtype': x['dtype'],
'value': None}
validator.check_value_type("input begin", begin_v, [tuple, list], self.name)
validator.check_value_type("input size", size_v, [tuple, list], self.name)
for key, value in zip(('begin', 'size'), (begin_v, size_v)):
validator.check(f'len of {key}', len(value),
'len x\'s dim', x_shp_len)
for i in range(x_shp_len):
validator.check_positive_int(size_v[i], f'input size[{i}]')
if x_shape[i] < begin_v[i] + size_v[i]:
y = begin_v[i] + size_v[i]
raise ValueError("For '%s' slice shape can not bigger than orign shape %d, %d." %


Loading…
Cancel
Save