diff --git a/mindspore/lite/src/ops/bias_add.cc b/mindspore/lite/src/ops/bias_add.cc
index 9368316635..849738cbb3 100644
--- a/mindspore/lite/src/ops/bias_add.cc
+++ b/mindspore/lite/src/ops/bias_add.cc
@@ -48,7 +48,7 @@ int BiasAdd::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &in
       return RET_ERROR;
     }
     if (prim.GetAttr("axis") == nullptr) {
-      MS_LOG(WARNING) << "get axis failed";
+      MS_LOG(INFO) << "BiasAdd's attr axis is set to default";
       attr->axis = {1};
     } else {
       attr->axis = GetValue<std::vector<int>>(prim.GetAttr("axis"));
diff --git a/mindspore/lite/src/ops/lstm.cc b/mindspore/lite/src/ops/lstm.cc
index 0dedb39a7c..b0f4d7771c 100644
--- a/mindspore/lite/src/ops/lstm.cc
+++ b/mindspore/lite/src/ops/lstm.cc
@@ -84,6 +84,8 @@ int Lstm::InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> output
   out_shape[2] = hidden_size;
   if (GetBidirection()) {
     out_shape.insert(out_shape.begin() + 1, 2);
+  } else {
+    out_shape.insert(out_shape.begin() + 1, 1);
   }
   output->set_shape(out_shape);
   // set hidden state, cell state
diff --git a/mindspore/lite/src/ops/power.cc b/mindspore/lite/src/ops/power.cc
index f99bfa8124..5f44f2d239 100644
--- a/mindspore/lite/src/ops/power.cc
+++ b/mindspore/lite/src/ops/power.cc
@@ -56,19 +56,19 @@ int Power::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inpu
       return RET_ERROR;
     }
     if (prim.GetAttr("scale") == nullptr) {
-      MS_LOG(WARNING) << "get scale failed";
+      MS_LOG(INFO) << "Power's attr scale is set to default";
       attr->scale = 1.0f;
     } else {
       attr->scale = GetValue<float>(prim.GetAttr("scale"));
     }
     if (prim.GetAttr("power") == nullptr) {
-      MS_LOG(WARNING) << "get power failed";
+      MS_LOG(INFO) << "Power's attr power is set to default";
       attr->power = 1.0f;
     } else {
       attr->power = GetValue<float>(prim.GetAttr("power"));
     }
     if (prim.GetAttr("shift") == nullptr) {
-      MS_LOG(WARNING) << "get shift failed";
+      MS_LOG(INFO) << "Power's attr shift is set to default";
       attr->shift = 0;
     } else {
       attr->shift = GetValue<float>(prim.GetAttr("shift"));
diff --git a/mindspore/lite/src/ops/squeeze.cc b/mindspore/lite/src/ops/squeeze.cc
index 700bee9494..3e3d8589a7 100644
--- a/mindspore/lite/src/ops/squeeze.cc
+++ b/mindspore/lite/src/ops/squeeze.cc
@@ -47,7 +47,7 @@ int Squeeze::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &in
       return RET_ERROR;
     }
     if (prim.GetAttr("axis") == nullptr) {
-      MS_LOG(WARNING) << "get axis failed";
+      MS_LOG(INFO) << "Squeeze's attr axis is set to default";
       attr->axis = {0};
     } else {
       int axis = GetValue<int>(prim.GetAttr("axis"));
diff --git a/mindspore/lite/src/ops/tile.cc b/mindspore/lite/src/ops/tile.cc
index 3247b2fdd5..49e907c9ab 100644
--- a/mindspore/lite/src/ops/tile.cc
+++ b/mindspore/lite/src/ops/tile.cc
@@ -53,7 +53,7 @@ int Tile::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &input
       return RET_ERROR;
     }
     if (prim.GetAttr("dims") == nullptr) {
-      MS_LOG(WARNING) << "get dims failed";
+      MS_LOG(INFO) << "Tile's attr dims is set to default";
       attr->dims = {1};
     } else {
       attr->dims = GetValue<std::vector<int>>(prim.GetAttr("dims"));
diff --git a/mindspore/lite/src/ops/transpose.cc b/mindspore/lite/src/ops/transpose.cc
index 37bf1c4ab4..1c1dc46ec3 100644
--- a/mindspore/lite/src/ops/transpose.cc
+++ b/mindspore/lite/src/ops/transpose.cc
@@ -124,6 +124,7 @@ int Transpose::InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> o
   if (!GetInferFlag()) {
     return RET_OK;
   }
+  MS_ASSERT(inputs_.size() == kSingleNum || inputs_.size() == kDoubleNum);
   MS_ASSERT(outputs_.size() == kSingleNum);
   int conjugate = GetConjugate();
diff --git a/mindspore/lite/src/runtime/kernel/arm/fp32/transpose_fp32.cc b/mindspore/lite/src/runtime/kernel/arm/fp32/transpose_fp32.cc
index e9a736bede..08cf7df921 100644
--- a/mindspore/lite/src/runtime/kernel/arm/fp32/transpose_fp32.cc
+++ b/mindspore/lite/src/runtime/kernel/arm/fp32/transpose_fp32.cc
@@ -116,6 +116,7 @@ int TransposeFp32Run(void *cdata, int task_id) {
 }
 
 int TransposeCPUKernel::Run() {
+  MS_ASSERT(in_tensors_.size() == 1 || in_tensors_.size() == 2);
   MS_ASSERT(out_tensors_.size() == 1);
   auto &in_tensor = in_tensors_.front();
   auto &out_tensor = out_tensors_.front();
diff --git a/mindspore/lite/tools/converter/parser/onnx/onnx_arithmetic_operation_parser.cc b/mindspore/lite/tools/converter/parser/onnx/onnx_arithmetic_operation_parser.cc
index c1d8214bae..173ddde1b9 100644
--- a/mindspore/lite/tools/converter/parser/onnx/onnx_arithmetic_operation_parser.cc
+++ b/mindspore/lite/tools/converter/parser/onnx/onnx_arithmetic_operation_parser.cc
@@ -15,7 +15,9 @@
  */
 
 #include "tools/converter/parser/onnx/onnx_arithmetic_operation_parser.h"
+#include "tools/converter/parser/onnx/onnx_tensor_parser.h"
 #include <memory>
+#include <numeric>
 
 namespace mindspore {
 namespace lite {
@@ -130,21 +132,21 @@ STATUS OnnxPowParser::Parse(const onnx::GraphProto &onnx_graph, const onnx::Node
   }
 
   const auto &onnx_pow_power = onnx_node.input(1);
-  auto nodeIter =
-    std::find_if(onnx_graph.node().begin(), onnx_graph.node().end(),
-                 [onnx_pow_power](const onnx::NodeProto &proto) { return proto.output(0) == onnx_pow_power; });
-  if (nodeIter == onnx_graph.node().end()) {
+  int index = OnnxTensorParser::GetInstance()->GetTensorCache()->FindTensor(onnx_pow_power);
+  if (index == -1) {
     MS_LOG(ERROR) << "can not find node: " << onnx_pow_power;
     return RET_ERROR;
   }
-  const float *pW = nullptr;
-  for (const auto &attrPower : nodeIter->attribute()) {
-    if (attrPower.name() == "value") {
-      const auto &t = attrPower.t();
-      pW = reinterpret_cast<const float *>(t.raw_data().data());
-    }
+  auto pow_attr = OnnxTensorParser::GetInstance()->GetTensorCache()->GetCachedTensor()[index];
+  if (std::accumulate(pow_attr->dims.begin(), pow_attr->dims.end(), 1, std::multiplies<int>()) != 1) {
+    MS_LOG(ERROR) << "the exponent element num is bigger than 1, which is not supported now.";
+    return RET_NOT_SUPPORT;
   }
-  attr->power = *pW;
+  if (pow_attr->data.data() == nullptr) {
+    MS_LOG(ERROR) << "power's attr pow can't be obtained.";
+    return RET_INVALID_OP_ATTR;
+  }
+  attr->power = *reinterpret_cast<const float *>(pow_attr->data.data());
   attr->scale = 1.0f;
   attr->shift = 0.0f;
   op->primitive->value.type = schema::PrimitiveType_Power;
diff --git a/mindspore/lite/tools/converter/parser/onnx/onnx_deconv_parser.cc b/mindspore/lite/tools/converter/parser/onnx/onnx_deconv_parser.cc
index 24ad60b9fd..729bfcc645 100644
--- a/mindspore/lite/tools/converter/parser/onnx/onnx_deconv_parser.cc
+++ b/mindspore/lite/tools/converter/parser/onnx/onnx_deconv_parser.cc
@@ -27,7 +27,7 @@ bool OnnxDeConvParser::ParseGroupDeConvolution(const std::unique_ptr
   std::unique_ptr<schema::DeDepthwiseConv2DT> deDepthwiseConv2DParam = std::make_unique<schema::DeDepthwiseConv2DT>();
   if (deDepthwiseConv2DParam == nullptr) {
-    MS_LOG(WARNING) << "new op failed";
+    MS_LOG(ERROR) << "new op failed";
     return false;
   }
   deDepthwiseConv2DParam->format = attr->format;
diff --git a/mindspore/lite/tools/optimizer/common/gllo_utils.cc b/mindspore/lite/tools/optimizer/common/gllo_utils.cc
index c87d6154c6..901377cbdb 100644
--- a/mindspore/lite/tools/optimizer/common/gllo_utils.cc
+++ b/mindspore/lite/tools/optimizer/common/gllo_utils.cc
@@ -374,8 +374,7 @@ schema::PrimitiveType GetCNodeType(const BaseRef &n) {
   } else if (utils::isa<ValueNodePtr>(n)) {
     value_node = utils::cast<ValueNodePtr>(n);
   } else {
- MS_LOG(ERROR) << "only value node or cnode has type"; - lite::ReturnCode::GetSingleReturnCode()->UpdateReturnCode(lite::RET_INVALID_OP_ATTR); + MS_LOG(INFO) << "only value node or cnode has type"; return schema::PrimitiveType_NONE; } if (value_node == nullptr) {
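For reference, a minimal standalone sketch (not MindSpore code; the function name and shapes below are illustrative only) of the LSTM output-shape rule after the lstm.cc change: the second axis now always carries the direction count, 1 for a unidirectional LSTM and 2 for a bidirectional one, so the output is rank 4 either way.

```cpp
#include <cstdio>
#include <vector>

// Sketch of the adjusted shape rule: the output always gets a
// num_directions axis, 2 when bidirectional and 1 otherwise.
std::vector<int> LstmOutShape(const std::vector<int> &in_shape, int hidden_size, bool bidirectional) {
  std::vector<int> out_shape = in_shape;  // {seq_len, batch, input_size}
  out_shape[2] = hidden_size;             // replace input_size with hidden_size
  out_shape.insert(out_shape.begin() + 1, bidirectional ? 2 : 1);
  return out_shape;                       // {seq_len, num_directions, batch, hidden_size}
}

int main() {
  auto shape = LstmOutShape({16, 4, 32}, 8, false);
  for (int d : shape) {
    printf("%d ", d);  // prints: 16 1 4 8
  }
  printf("\n");
  return 0;
}
```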