diff --git a/mindspore/lite/src/ops/conv2d_grad_filter.cc b/mindspore/lite/src/ops/conv2d_grad_filter.cc index aa403d2e45..b32b540172 100644 --- a/mindspore/lite/src/ops/conv2d_grad_filter.cc +++ b/mindspore/lite/src/ops/conv2d_grad_filter.cc @@ -152,9 +152,9 @@ int Conv2DGradFilter::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr attr->filter_shape.resize(valTuplPtr->size()); for (size_t i = 0; i < valTuplPtr->size(); i++) { - auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]); + auto elem = (*valTuplPtr)[i]; MS_ASSERT(elem != nullptr); - attr->filter_shape[nchw2nhwc[i]] = elem->value(); + attr->filter_shape[nchw2nhwc[i]] = CastToInt(elem, false).front(); } } } diff --git a/mindspore/lite/src/ops/conv2d_grad_input.cc b/mindspore/lite/src/ops/conv2d_grad_input.cc index 26a85610c4..7c6dd6e520 100644 --- a/mindspore/lite/src/ops/conv2d_grad_input.cc +++ b/mindspore/lite/src/ops/conv2d_grad_input.cc @@ -154,9 +154,9 @@ int Conv2DGradInput::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr attr->input_shape.resize(valTuplPtr->size()); for (size_t i = 0; i < valTuplPtr->size(); i++) { - auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]); + auto elem = (*valTuplPtr)[i]; MS_ASSERT(elem != nullptr); - attr->input_shape[nchw2nhwc[i]] = elem->value(); + attr->input_shape[nchw2nhwc[i]] = CastToInt(elem, false).front(); } } } diff --git a/mindspore/lite/src/ops/reduce.cc b/mindspore/lite/src/ops/reduce.cc index 205092f20a..786485178e 100644 --- a/mindspore/lite/src/ops/reduce.cc +++ b/mindspore/lite/src/ops/reduce.cc @@ -82,9 +82,9 @@ int Reduce::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inp auto valTuplPtr = dyn_cast<ValueTuple>(value); MS_ASSERT(valTuplPtr != nullptr); for (size_t i = 0; i < valTuplPtr->size(); i++) { - auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]); + auto elem = (*valTuplPtr)[i]; MS_ASSERT(elem != nullptr); - attr->axes.emplace_back(elem->value()); + attr->axes.emplace_back(CastToInt(elem, false).front()); } } else { int axes_item = CastToInt(value, false).front(); diff --git 
a/mindspore/lite/src/ops/slice.cc b/mindspore/lite/src/ops/slice.cc index c36fcce858..8fd6c6c119 100644 --- a/mindspore/lite/src/ops/slice.cc +++ b/mindspore/lite/src/ops/slice.cc @@ -71,9 +71,9 @@ int Slice::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inpu auto valTuplPtr = dyn_cast<ValueTuple>(value); MS_ASSERT(valTuplPtr != nullptr); for (size_t i = 0; i < valTuplPtr->size(); i++) { - auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]); + auto elem = (*valTuplPtr)[i]; MS_ASSERT(elem != nullptr); - attr->begin.emplace_back(elem->value()); + attr->begin.emplace_back(CastToInt(elem, false).front()); } } } @@ -88,9 +88,9 @@ int Slice::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inpu auto valTuplPtr = dyn_cast<ValueTuple>(value); MS_ASSERT(valTuplPtr != nullptr); for (size_t i = 0; i < valTuplPtr->size(); i++) { - auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]); + auto elem = (*valTuplPtr)[i]; MS_ASSERT(elem != nullptr); - attr->size.emplace_back(elem->value()); + attr->size.emplace_back(CastToInt(elem, false).front()); } } } diff --git a/mindspore/lite/src/ops/tile.cc b/mindspore/lite/src/ops/tile.cc index 303be8ed37..893877e95c 100644 --- a/mindspore/lite/src/ops/tile.cc +++ b/mindspore/lite/src/ops/tile.cc @@ -70,9 +70,9 @@ int Tile::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &input auto valTuplPtr = dyn_cast<ValueTuple>(value); MS_ASSERT(valTuplPtr != nullptr); for (size_t i = 0; i < valTuplPtr->size(); i++) { - auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]); + auto elem = (*valTuplPtr)[i]; MS_ASSERT(elem != nullptr); - attr->multiples.emplace_back(elem->value()); + attr->multiples.emplace_back(CastToInt(elem, false).front()); } } else { int multiple = CastToInt(value, false).front(); diff --git a/mindspore/lite/tools/anf_exporter/anf_exporter.cc b/mindspore/lite/tools/anf_exporter/anf_exporter.cc index 7ce2a813b1..b94072a575 100644 --- a/mindspore/lite/tools/anf_exporter/anf_exporter.cc +++ b/mindspore/lite/tools/anf_exporter/anf_exporter.cc @@ -408,7 +408,7 @@ int 
AnfExporter::ConvertInputValueNode(std::shared_ptr<AnfNode> input_anode, node_id_map_[valueNode->fullname_with_scope()] = meta_graphT->allTensors.size(); output_cnode->inputIndex.emplace_back(meta_graphT->allTensors.size()); meta_graphT->allTensors.emplace_back(std::move(paramTensor)); - } else if (value->isa<Int32Imm>()) { + } else if (value->isa<Int32Imm>() || value->isa<Int64Imm>()) { auto valueAbstract = valueNode->abstract(); auto abstractScalar = utils::cast<abstract::AbstractScalarPtr>(valueAbstract); auto typePtr = abstractScalar->GetTypeTrack();