
!15604 fix ci alarm on master

From: @TFbunny
Reviewed-by: @tom__chen,@robingrosman
Signed-off-by: @robingrosman
pull/15604/MERGE
Committed by mindspore-ci-bot (Gitee), 4 years ago
commit d3df7ec7b5
2 changed files with 11 additions and 11 deletions:
  1. mindspore/core/abstract/prim_arrays.cc  +9 −9
  2. mindspore/core/abstract/prim_nn.cc      +2 −2
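
The diff appears to deal only with how return values are handled: a (void) cast is added where a non-void result is discarded on purpose (the iterators returned by std::vector::insert and std::vector::erase inside the ArgMaxWithValue shape lambda), while the casts previously wrapped around the CheckMinMaxShape calls are removed. Below is a minimal sketch of the discard pattern only, assuming ShapeVector aliases std::vector<int64_t> as in mindspore/core; AppendShape and DropAxis are hypothetical names, not MindSpore functions.

#include <cstdint>
#include <vector>

using ShapeVector = std::vector<int64_t>;  // assumed alias, for illustration only

// Hypothetical helper: append x_shape to shape, discarding the iterator that
// std::vector::insert returns. The (void) cast tells static checkers the
// return value is ignored intentionally, which silences this kind of CI alarm.
void AppendShape(ShapeVector *shape, const ShapeVector &x_shape) {
  (void)shape->insert(shape->end(), x_shape.begin(), x_shape.end());
}

// Hypothetical helper: drop one axis, discarding std::vector::erase's iterator
// result in the same way.
void DropAxis(ShapeVector *shape, int64_t axis) {
  (void)shape->erase(shape->begin() + axis);
}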

mindspore/core/abstract/prim_arrays.cc  (+9 −9)

@@ -403,7 +403,7 @@ AbstractBasePtr InferImplScatterAdd(const AnalysisEnginePtr &, const PrimitivePt
   ShapeVector shape = x->shape()->shape();
   ShapeVector min_shape = x->shape()->min_shape();
   ShapeVector max_shape = x->shape()->max_shape();
-  (void)CheckMinMaxShape(shape, &min_shape, &max_shape);
+  CheckMinMaxShape(shape, &min_shape, &max_shape);
   return std::make_shared<AbstractTensor>(x->element(), std::make_shared<Shape>(shape, min_shape, max_shape));
 }


@@ -417,7 +417,7 @@ AbstractBasePtr InferImplScatterUpdate(const AnalysisEnginePtr &, const Primitiv
   ShapeVector shape = x->shape()->shape();
   ShapeVector min_shape = x->shape()->min_shape();
   ShapeVector max_shape = x->shape()->max_shape();
-  (void)CheckMinMaxShape(shape, &min_shape, &max_shape);
+  CheckMinMaxShape(shape, &min_shape, &max_shape);
   return std::make_shared<AbstractTensor>(x->element(), std::make_shared<Shape>(shape, min_shape, max_shape));
 }


@@ -774,7 +774,7 @@ AbstractBasePtr InferImplTranspose(const AnalysisEnginePtr &, const PrimitivePtr
   ShapeVector min_shp;
   ShapeVector x_max_shp = input->shape()->max_shape();
   ShapeVector x_min_shp = input->shape()->min_shape();
-  (void)CheckMinMaxShape(input_shp, &x_min_shp, &x_max_shp);
+  CheckMinMaxShape(input_shp, &x_min_shp, &x_max_shp);
   for (size_t i = 0; i < perm_vec.size(); i++) {
     auto idx = static_cast<size_t>(perm_vec[i]);
     result_shp.push_back(input_shp[idx]);
@@ -984,7 +984,7 @@ AbstractBasePtr InferImplConcat(const AnalysisEnginePtr &, const PrimitivePtr &p
   int64_t rank_base = SizeToLong(shape_base.size());
   ShapeVector min_shape_base = tensor_base->shape()->min_shape();
   ShapeVector max_shape_base = tensor_base->shape()->max_shape();
-  (void)CheckMinMaxShape(shape_base, &min_shape_base, &max_shape_base);
+  CheckMinMaxShape(shape_base, &min_shape_base, &max_shape_base);

   primitive->set_attr("T", tensor_base->element()->BuildType());
   primitive->set_attr("inputNums", MakeValue(SizeToLong(tuple_len)));
@@ -1009,7 +1009,7 @@ AbstractBasePtr InferImplConcat(const AnalysisEnginePtr &, const PrimitivePtr &p
   int64_t rank_tensor = SizeToLong(shape_tensor.size());
   ShapeVector min_shape_tensor = tensor->shape()->min_shape();
   ShapeVector max_shape_tensor = tensor->shape()->max_shape();
-  (void)CheckMinMaxShape(shape_tensor, &min_shape_tensor, &max_shape_tensor);
+  CheckMinMaxShape(shape_tensor, &min_shape_tensor, &max_shape_tensor);
   (void)CheckDtypeSame(op_name, tensor_base, tensor);
   if (rank_tensor != rank_base) {
     MS_LOG(EXCEPTION) << op_name << " can not concat element " << i << " with the first element: Wrong Rank";
@@ -1033,7 +1033,7 @@ AbstractBasePtr InferImplConcat(const AnalysisEnginePtr &, const PrimitivePtr &p
   auto shape = ret->shape()->shape();
   auto min_shape = ret->shape()->min_shape();
   auto max_shape = ret->shape()->max_shape();
-  (void)CheckMinMaxShape(shape, &min_shape, &max_shape);
+  CheckMinMaxShape(shape, &min_shape, &max_shape);
   shape[axis_value] = all_shp;
   min_shape[axis_value] = min_all_shp;
   max_shape[axis_value] = max_all_shp;
@@ -1107,13 +1107,13 @@ AbstractBasePtr InferImplArgMaxWithValue(const AnalysisEnginePtr &, const Primit
   };
   // main calculate shape func
   auto cal_shape = [axis, keep_dims_value, check_axis](ShapeVector &shape, const ShapeVector &x_shape) -> void {
-    shape.insert(shape.end(), x_shape.begin(), x_shape.end());
+    (void)shape.insert(shape.end(), x_shape.begin(), x_shape.end());
     auto axis_value = GetValue<int64_t>(axis);
     check_axis(axis_value, x_shape.size());
     if (keep_dims_value) {
       shape[axis_value] = 1;
     } else {
-      shape.erase(std::begin(shape) + axis_value);
+      (void)shape.erase(std::begin(shape) + axis_value);
     }
   };
   ShapeVector shape = {};
ShapeVector shape = {}; ShapeVector shape = {};
@@ -1122,7 +1122,7 @@ AbstractBasePtr InferImplArgMaxWithValue(const AnalysisEnginePtr &, const Primit
   ShapeVector x_shape = x->shape()->shape();
   ShapeVector x_min_shape = x->shape()->min_shape();
   ShapeVector x_max_shape = x->shape()->max_shape();
-  (void)CheckMinMaxShape(x_shape, &x_min_shape, &x_max_shape);
+  CheckMinMaxShape(x_shape, &x_min_shape, &x_max_shape);
   cal_shape(shape, x_shape);
   cal_shape(min_shape, x_min_shape);
   cal_shape(max_shape, x_max_shape);


mindspore/core/abstract/prim_nn.cc  (+2 −2)

@@ -382,7 +382,7 @@ AbstractBasePtr InferImplBiasAdd(const AnalysisEnginePtr &, const PrimitivePtr &
     MS_LOG(EXCEPTION) << "BiasAdd shape error, data format is " << data_format
                       << ", got bias_shape[0]: " << bias_shape[0] << ", x_channel: " << x_channel << ".";
   }
-  (void)CheckMinMaxShape(x_shape, &x_min_shape, &x_max_shape);
+  CheckMinMaxShape(x_shape, &x_min_shape, &x_max_shape);
   return std::make_shared<AbstractTensor>(x->element(), std::make_shared<Shape>(x_shape, x_min_shape, x_max_shape));
 }


@@ -537,7 +537,7 @@ AbstractBasePtr InferImplDropout(const AnalysisEnginePtr &, const PrimitivePtr &
   ShapeVector shape = x->shape()->shape();
   ShapeVector min_shape = x->shape()->min_shape();
   ShapeVector max_shape = x->shape()->max_shape();
-  (void)CheckMinMaxShape(shape, &min_shape, &max_shape);
+  CheckMinMaxShape(shape, &min_shape, &max_shape);
   auto output_shape =
     std::make_shared<AbstractTensor>(x->element(), std::make_shared<Shape>(shape, min_shape, max_shape));
   AbstractBasePtrList ret = {output_shape, output_shape};
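
For the CheckMinMaxShape calls the edit runs the other way: the (void) casts are removed rather than added. One plausible reading, assuming CheckMinMaxShape returns void (its declaration is not part of this diff), is that casting a void result to void is itself what the checker reported as redundant. The stub below is hypothetical and only illustrates that reading.

#include <cstdint>
#include <vector>

using ShapeVector = std::vector<int64_t>;  // assumed alias, as above

// Hypothetical stand-in; the real CheckMinMaxShape lives in mindspore/core/abstract
// and its body is not shown here. This stub only gives the call something to bind to.
void CheckMinMaxShape(const ShapeVector &shape, ShapeVector *min_shape, ShapeVector *max_shape) {
  if (min_shape->empty()) *min_shape = shape;
  if (max_shape->empty()) *max_shape = shape;
}

void InferShapeExample(const ShapeVector &shape, ShapeVector *min_shape, ShapeVector *max_shape) {
  // With a void-returning callee there is nothing to discard, so the plain call
  // is the form that keeps the checker quiet.
  CheckMinMaxShape(shape, min_shape, max_shape);
}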

