From: @xu_anyue
Reviewed-by: @hangangqiang, @zhanghaibo5
Signed-off-by: @hangangqiang
tags/v1.1.0
@@ -152,9 +152,9 @@ int Conv2DGradFilter::UnPackAttr(const Primitive &prim, const std::vector<AnfNod
       const int nchw2nhwc[] = {0, 3, 1, 2};
       attr->filter_shape.resize(valTuplPtr->size());
       for (size_t i = 0; i < valTuplPtr->size(); i++) {
-        auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]);
+        auto elem = (*valTuplPtr)[i];
         MS_ASSERT(elem != nullptr);
-        attr->filter_shape[nchw2nhwc[i]] = elem->value();
+        attr->filter_shape[nchw2nhwc[i]] = CastToInt(elem, false).front();
       }
     }
   }

@@ -154,9 +154,9 @@ int Conv2DGradInput::UnPackAttr(const Primitive &prim, const std::vector<AnfNode
       const int nchw2nhwc[] = {0, 3, 1, 2};
       attr->input_shape.resize(valTuplPtr->size());
       for (size_t i = 0; i < valTuplPtr->size(); i++) {
-        auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]);
+        auto elem = (*valTuplPtr)[i];
         MS_ASSERT(elem != nullptr);
-        attr->input_shape[nchw2nhwc[i]] = elem->value();
+        attr->input_shape[nchw2nhwc[i]] = CastToInt(elem, false).front();
       }
     }
   }

@@ -82,9 +82,9 @@ int Reduce::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inp
       auto valTuplPtr = dyn_cast<ValueTuple>(value);
       MS_ASSERT(valTuplPtr != nullptr);
       for (size_t i = 0; i < valTuplPtr->size(); i++) {
-        auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]);
+        auto elem = (*valTuplPtr)[i];
         MS_ASSERT(elem != nullptr);
-        attr->axes.emplace_back(elem->value());
+        attr->axes.emplace_back(CastToInt(elem, false).front());
       }
     } else {
       int axes_item = CastToInt(value, false).front();

@@ -71,9 +71,9 @@ int Slice::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inpu
       auto valTuplPtr = dyn_cast<ValueTuple>(value);
       MS_ASSERT(valTuplPtr != nullptr);
       for (size_t i = 0; i < valTuplPtr->size(); i++) {
-        auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]);
+        auto elem = (*valTuplPtr)[i];
         MS_ASSERT(elem != nullptr);
-        attr->begin.emplace_back(elem->value());
+        attr->begin.emplace_back(CastToInt(elem, false).front());
       }
     }
   }

@@ -88,9 +88,9 @@ int Slice::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inpu
       auto valTuplPtr = dyn_cast<ValueTuple>(value);
       MS_ASSERT(valTuplPtr != nullptr);
      for (size_t i = 0; i < valTuplPtr->size(); i++) {
-        auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]);
+        auto elem = (*valTuplPtr)[i];
         MS_ASSERT(elem != nullptr);
-        attr->size.emplace_back(elem->value());
+        attr->size.emplace_back(CastToInt(elem, false).front());
       }
     }
   }

@@ -70,9 +70,9 @@ int Tile::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &input
       auto valTuplPtr = dyn_cast<ValueTuple>(value);
       MS_ASSERT(valTuplPtr != nullptr);
       for (size_t i = 0; i < valTuplPtr->size(); i++) {
-        auto elem = dyn_cast<Int32Imm>((*valTuplPtr)[i]);
+        auto elem = (*valTuplPtr)[i];
         MS_ASSERT(elem != nullptr);
-        attr->multiples.emplace_back(elem->value());
+        attr->multiples.emplace_back(CastToInt(elem, false).front());
       }
     } else {
       int multiple = CastToInt(value, false).front();

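Side note (not part of the diff): every hunk above replaces the Int32Imm-only element extraction with CastToInt(elem, false).front(), so tuple attributes (filter/input shapes, reduce axes, slice begin/size, tile multiples) are read correctly whether the front end emitted 32-bit or 64-bit integer scalars. Below is a minimal sketch of the behaviour these call sites rely on, assuming MindSpore's ir/value.h scalar types; the converter's real CastToInt helper is not shown in this diff and may differ.

```cpp
// Hypothetical sketch only -- not the converter's actual CastToInt.
// Assumes MindSpore's ir/value.h types (ValuePtr, Int32Imm, Int64Imm, GetValue).
#include <vector>

std::vector<int> CastToIntSketch(const ValuePtr &value, bool /*is_vector*/) {
  std::vector<int> result;
  MS_ASSERT(value != nullptr);
  if (value->isa<Int64Imm>()) {
    // Narrow 64-bit scalars; shape/axis attributes are assumed to fit in int.
    result.push_back(static_cast<int>(GetValue<int64_t>(value)));
  } else if (value->isa<Int32Imm>()) {
    result.push_back(GetValue<int32_t>(value));
  }
  return result;
}
```

The is_vector flag is ignored in this sketch; the call sites above always pass false because each tuple element is already a scalar.
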
@@ -408,7 +408,7 @@ int AnfExporter::ConvertInputValueNode(std::shared_ptr<AnfNode> input_anode,
     node_id_map_[valueNode->fullname_with_scope()] = meta_graphT->allTensors.size();
     output_cnode->inputIndex.emplace_back(meta_graphT->allTensors.size());
     meta_graphT->allTensors.emplace_back(std::move(paramTensor));
-  } else if (value->isa<mindspore::Int32Imm>()) {
+  } else if (value->isa<mindspore::Int32Imm>() || value->isa<mindspore::Int64Imm>()) {
     auto valueAbstract = valueNode->abstract();
    auto abstractScalar = utils::cast<abstract::AbstractScalarPtr>(valueAbstract);
     auto typePtr = abstractScalar->GetTypeTrack();
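
Side note (not part of the diff): with the widened type check, this exporter branch is presumably expected to pack both Int32Imm and Int64Imm scalar value nodes into the same one-element int32 tensor. The sketch below illustrates that idea only; it uses assumed names (schema::TensorT, kNumberTypeInt32, CastToInt), and the actual code following the changed line is not visible in this hunk.

```cpp
// Hypothetical sketch -- not code from this commit.
#include <cstring>
#include <memory>

std::unique_ptr<schema::TensorT> PackIntScalarSketch(const ValuePtr &value) {
  auto tensor = std::make_unique<schema::TensorT>();
  tensor->dataType = kNumberTypeInt32;         // assumed common serialized type
  tensor->dims = {1};                          // scalar stored as a one-element tensor
  int data = CastToInt(value, false).front();  // handles Int32Imm and Int64Imm alike
  tensor->data.resize(sizeof(int));
  memcpy(tensor->data.data(), &data, sizeof(int));
  return tensor;
}
```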