From 3d8b90e8ae86cbe9c9a825807cbb7a9a0c9aa326 Mon Sep 17 00:00:00 2001 From: cjh9368 Date: Tue, 10 Nov 2020 17:15:40 +0800 Subject: [PATCH] rewrite caffe parsers --- .../lite/tools/anf_exporter/anf_exporter.cc | 10 +- .../tools/converter/graphdef_transform.cc | 14 +- .../graph/batchnorm_convert_scale_pass.cc | 25 +- .../graph/batchnorm_convert_scale_pass.h | 4 + .../parser/caffe/caffe_argmax_parser.cc | 26 +- .../parser/caffe/caffe_argmax_parser.h | 5 +- .../parser/caffe/caffe_batchnorm_parser.cc | 63 +- .../parser/caffe/caffe_batchnorm_parser.h | 4 +- .../parser/caffe/caffe_concat_parser.cc | 30 +- .../parser/caffe/caffe_concat_parser.h | 9 +- .../converter/parser/caffe/caffe_converter.cc | 7 +- .../converter/parser/caffe/caffe_converter.h | 1 - .../parser/caffe/caffe_convolution_parser.cc | 55 +- .../parser/caffe/caffe_convolution_parser.h | 5 +- .../parser/caffe/caffe_crop_parser.cc | 30 +- .../parser/caffe/caffe_crop_parser.h | 3 +- .../caffe/caffe_deconvolution_parser.cc | 55 +- .../parser/caffe/caffe_deconvolution_parser.h | 5 +- .../parser/caffe/caffe_eltwise_parser.cc | 36 +- .../parser/caffe/caffe_eltwise_parser.h | 3 +- .../parser/caffe/caffe_elu_parser.cc | 26 +- .../converter/parser/caffe/caffe_elu_parser.h | 3 +- .../parser/caffe/caffe_exp_parser.cc | 25 +- .../converter/parser/caffe/caffe_exp_parser.h | 3 +- .../parser/caffe/caffe_flatten_parser.cc | 26 +- .../parser/caffe/caffe_flatten_parser.h | 3 +- .../parser/caffe/caffe_innerproduct_parser.cc | 56 +- .../parser/caffe/caffe_innerproduct_parser.h | 3 +- .../converter/parser/caffe/caffe_inspector.cc | 8 +- .../converter/parser/caffe/caffe_inspector.h | 4 +- .../parser/caffe/caffe_interp_parser.cc | 30 +- .../parser/caffe/caffe_interp_parser.h | 3 +- .../parser/caffe/caffe_model_parser.cc | 554 +++++++++++------- .../parser/caffe/caffe_model_parser.h | 43 +- .../parser/caffe/caffe_node_parser.h | 8 +- .../parser/caffe/caffe_permute_parser.cc | 26 +- .../parser/caffe/caffe_permute_parser.h | 3 +- 
.../parser/caffe/caffe_pooling_parser.cc | 110 ++-- .../parser/caffe/caffe_pooling_parser.h | 3 +- .../parser/caffe/caffe_power_parser.cc | 25 +- .../parser/caffe/caffe_power_parser.h | 3 +- .../parser/caffe/caffe_prelu_parser.cc | 41 +- .../parser/caffe/caffe_prelu_parser.h | 3 +- .../parser/caffe/caffe_reduce_parser.cc | 63 +- .../parser/caffe/caffe_reduce_parser.h | 3 +- .../parser/caffe/caffe_relu6_parser.cc | 26 +- .../parser/caffe/caffe_relu6_parser.h | 3 +- .../parser/caffe/caffe_relu_parser.cc | 26 +- .../parser/caffe/caffe_relu_parser.h | 3 +- .../parser/caffe/caffe_reshape_parser.cc | 28 +- .../parser/caffe/caffe_reshape_parser.h | 3 +- .../parser/caffe/caffe_scale_parser.cc | 62 +- .../parser/caffe/caffe_scale_parser.h | 3 +- .../parser/caffe/caffe_sigmoid_parser.cc | 26 +- .../parser/caffe/caffe_sigmoid_parser.h | 3 +- .../parser/caffe/caffe_slice_parser.cc | 26 +- .../parser/caffe/caffe_slice_parser.h | 3 +- .../parser/caffe/caffe_softmax_parser.cc | 26 +- .../parser/caffe/caffe_softmax_parser.h | 3 +- .../parser/caffe/caffe_tanh_parser.cc | 26 +- .../parser/caffe/caffe_tanh_parser.h | 3 +- .../parser/caffe/caffe_tile_parser.cc | 25 +- .../parser/caffe/caffe_tile_parser.h | 3 +- .../parser/tflite/tflite_model_parser.cc | 2 +- .../tools/optimizer/fusion/conv_bn_fusion.cc | 40 +- 65 files changed, 751 insertions(+), 1054 deletions(-) diff --git a/mindspore/lite/tools/anf_exporter/anf_exporter.cc b/mindspore/lite/tools/anf_exporter/anf_exporter.cc index 48de5d5a44..e182fbbe9d 100644 --- a/mindspore/lite/tools/anf_exporter/anf_exporter.cc +++ b/mindspore/lite/tools/anf_exporter/anf_exporter.cc @@ -385,9 +385,14 @@ int AnfExporter::ConvertInputParameter(const std::shared_ptr &input_ano if (paramValue != nullptr) { paramTensor->data.resize(paramValue->tensor_size()); paramTensor->format = schema::Format(paramValue->format()); - memcpy(paramTensor->data.data(), paramValue->tensor_addr(), paramValue->tensor_size()); + if (EOK != 
memcpy_s(paramTensor->data.data(), paramTensor->data.size(), paramValue->tensor_addr(), + paramValue->tensor_size())) { + MS_LOG(ERROR) << "memcpy_s failed."; + return RET_ERROR; + } } + paramTensor->name = input_name; node_id_map_[input_name] = meta_graphT->allTensors.size(); output_cnode->inputIndex.emplace_back(meta_graphT->allTensors.size()); meta_graphT->allTensors.emplace_back(std::move(paramTensor)); @@ -572,9 +577,11 @@ void AnfExporter::SetOpOutputNode(const CNodePtr &cnode, const std::unique_ptrsize() == 1) { node_id_map_[cnode_name] = meta_graphT->allTensors.size(); + msTensor->name = cnode_name; } else { std::string name = cnode_name + "_o:" + std::to_string(i); node_id_map_[name] = meta_graphT->allTensors.size(); + msTensor->name = name; } meta_graphT->allTensors.emplace_back(msTensor); if (IsPrimitiveCNode(cnode, schema::PrimitiveType_Conv2D) || @@ -592,6 +599,7 @@ void AnfExporter::SetOpOutputNode(const CNodePtr &cnode, const std::unique_ptrnodeType = schema::NodeType_CNode; ms_tensor->dataType = TypeId::kNumberTypeFloat32; + ms_tensor->name = cnode_name; fb_node->outputIndex.emplace_back(meta_graphT->allTensors.size()); node_id_map_[cnode_name] = meta_graphT->allTensors.size(); meta_graphT->allTensors.emplace_back(ms_tensor); diff --git a/mindspore/lite/tools/converter/graphdef_transform.cc b/mindspore/lite/tools/converter/graphdef_transform.cc index ee2349d090..6ed31418b1 100644 --- a/mindspore/lite/tools/converter/graphdef_transform.cc +++ b/mindspore/lite/tools/converter/graphdef_transform.cc @@ -51,7 +51,7 @@ int GraphDefTransform::Transform(const converter::Flags &ctx) { { Optimizer unusedOpRemoveOptimizer; unusedOpRemoveOptimizer.AddPass(new UnusedNodeRemovePass()); - if (ctx.trainModel == false) { + if (!ctx.trainModel) { unusedOpRemoveOptimizer.AddPass(new DropoutNodeRemovePass()); } unusedOpRemoveOptimizer.AddPass(new IsolatedNodeRemovePass()); @@ -87,8 +87,14 @@ int GraphDefTransform::Transform(const converter::Flags &ctx) { // postconvert 
pass { Optimizer fusionOptimizer; - if (ctx.trainModel == false) { - fusionOptimizer.AddPass(new (std::nothrow) BatchNormConvertScalePass()); + if (!ctx.trainModel) { + auto batch_norm_scale_pass = new (std::nothrow) BatchNormConvertScalePass(); + if (batch_norm_scale_pass == nullptr) { + MS_LOG(ERROR) << "new batch_norm_scale_pass failed."; + return RET_ERROR; + } + batch_norm_scale_pass->SetFmk(ctx.fmk); + fusionOptimizer.AddPass(batch_norm_scale_pass); } fusionOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass()); status = fusionOptimizer.Run(graphDefT); @@ -116,7 +122,7 @@ int GraphDefTransform::Transform(const converter::Flags &ctx) { formatTransOptimizer.AddPass(new (std::nothrow) TransOpInsertPass()); formatTransOptimizer.AddPass(new (std::nothrow) FormatTransFusionPass()); formatTransOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass()); - if (ctx.trainModel == false && ctx.fmk != converter::FmkType_ONNX) { + if (!ctx.trainModel && ctx.fmk != converter::FmkType_ONNX) { formatTransOptimizer.AddPass(new (std::nothrow) GlobalFormatTransformPass()); formatTransOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass()); } diff --git a/mindspore/lite/tools/converter/legacy_optimizer/graph/batchnorm_convert_scale_pass.cc b/mindspore/lite/tools/converter/legacy_optimizer/graph/batchnorm_convert_scale_pass.cc index 4b689f354e..34296e4ef6 100644 --- a/mindspore/lite/tools/converter/legacy_optimizer/graph/batchnorm_convert_scale_pass.cc +++ b/mindspore/lite/tools/converter/legacy_optimizer/graph/batchnorm_convert_scale_pass.cc @@ -19,6 +19,7 @@ #include #include #include +#include "tools/converter/converter_flags.h" #include "third_party/securec/include/securec.h" #include "src/common/log_adapter.h" #include "tools/common/tensor_util.h" @@ -27,10 +28,9 @@ namespace mindspore { namespace lite { -#define CAFFE_BATCHNORM_OP_WEIGHT_NUM 2 -#define TF_BATCHNORM_OP_WEIGHT_NUM 4 #define CAFFE_BATCHNORM_MEAN_INDEX 0 #define CAFFE_BATCHNORM_VARIANCE_INDEX 
1 +#define CAFFE_BATCHNORM_SCALE_INDEX 2 #define TF_BATCHNORM_SCALE_INDEX 0 #define TF_BATCHNORM_BIAS_INDEX 1 #define TF_BATCHNORM_MEAN_INDEX 2 @@ -237,18 +237,27 @@ STATUS BatchNormConvertScalePass::GetBnWeightTensors(MetaGraphT *graph, BNWeight MS_ASSERT(graph->allTensors.size() > bnNode->inputIndex.at(1)); auto bnWeightTensorIdxes = bnNode->inputIndex; bnWeightTensorIdxes.erase(bnWeightTensorIdxes.begin()); - if (bnWeightTensorIdxes.size() == CAFFE_BATCHNORM_OP_WEIGHT_NUM) { + if (fmkType == converter::FmkType_CAFFE) { bnWeightTensors->meanTensor = graph->allTensors.at(bnWeightTensorIdxes[CAFFE_BATCHNORM_MEAN_INDEX]).get(); bnWeightTensors->varianceTensor = graph->allTensors.at(bnWeightTensorIdxes[CAFFE_BATCHNORM_VARIANCE_INDEX]).get(); - } else if (bnWeightTensorIdxes.size() == TF_BATCHNORM_OP_WEIGHT_NUM) { + auto scaleTensor = graph->allTensors.at(bnWeightTensorIdxes[CAFFE_BATCHNORM_SCALE_INDEX]).get(); + + // calibrate mean and variance + float scale_factor_data = (reinterpret_cast(scaleTensor->data.data()))[0]; + float scale_factor = scale_factor_data == 0 ? 
0 : 1 / scale_factor_data; + auto mean_data = reinterpret_cast(bnWeightTensors->meanTensor->data.data()); + auto variance_data = reinterpret_cast(bnWeightTensors->varianceTensor->data.data()); + for (size_t i = 0; i < GetShapeSize(*bnWeightTensors->meanTensor); i++) { + mean_data[i] *= scale_factor; + } + for (size_t i = 0; i < GetShapeSize(*bnWeightTensors->varianceTensor); i++) { + variance_data[i] *= scale_factor; + } + } else { bnWeightTensors->scaleTensor = graph->allTensors.at(bnWeightTensorIdxes[TF_BATCHNORM_SCALE_INDEX]).get(); bnWeightTensors->biasTensor = graph->allTensors.at(bnWeightTensorIdxes[TF_BATCHNORM_BIAS_INDEX]).get(); bnWeightTensors->meanTensor = graph->allTensors.at(bnWeightTensorIdxes[TF_BATCHNORM_MEAN_INDEX]).get(); bnWeightTensors->varianceTensor = graph->allTensors.at(bnWeightTensorIdxes[TF_BATCHNORM_VARIANCE_INDEX]).get(); - } else { - MS_LOG(ERROR) << "BatchNorm should has 2 or 4 weight tensors, current number of weight tensors: " - << bnWeightTensorIdxes.size(); - return RET_ERROR; } if (bnWeightTensors->meanTensor == nullptr) { diff --git a/mindspore/lite/tools/converter/legacy_optimizer/graph/batchnorm_convert_scale_pass.h b/mindspore/lite/tools/converter/legacy_optimizer/graph/batchnorm_convert_scale_pass.h index b7a9eedee7..e9dab257a2 100644 --- a/mindspore/lite/tools/converter/legacy_optimizer/graph/batchnorm_convert_scale_pass.h +++ b/mindspore/lite/tools/converter/legacy_optimizer/graph/batchnorm_convert_scale_pass.h @@ -23,6 +23,7 @@ #include #include "tools/common/graph_util.h" #include "tools/converter/optimizer.h" +#include "tools/converter/converter_flags.h" using mindspore::schema::TensorT; namespace mindspore { @@ -41,6 +42,8 @@ class BatchNormConvertScalePass : public GraphPass { STATUS Run(MetaGraphT *graph) override; + void SetFmk(converter::FmkType fmk) { this->fmkType = fmk; } + protected: STATUS GetTransParam(MetaGraphT *graph, const std::unique_ptr &bnNode); @@ -60,6 +63,7 @@ class BatchNormConvertScalePass : public 
GraphPass { float *transBias = nullptr; std::unique_ptr newScaleWeightTensor = nullptr; std::unique_ptr newScaleBiasTensor = nullptr; + converter::FmkType fmkType = converter::FmkType_TF; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_argmax_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_argmax_parser.cc index a2716ba53d..b5b9501700 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_argmax_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_argmax_parser.cc @@ -19,23 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffeArgMaxParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeArgMaxParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +lite::PrimitiveC *CaffeArgMaxParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } attr->outMaxValue = false; @@ -58,11 +47,10 @@ STATUS CaffeArgMaxParser::Parse(const caffe::LayerParameter &proto, const caffe: attr->axis = axis; attr->axisType = axisType; attr->keepDims = true; - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_ArgMax; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_ArgMax; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeArgMaxParser("ArgMax", new CaffeArgMaxParser()); diff --git 
a/mindspore/lite/tools/converter/parser/caffe/caffe_argmax_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_argmax_parser.h index 672699904a..590c7f73e0 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_argmax_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_argmax_parser.h @@ -18,6 +18,7 @@ #define MINDSPORE_LITE_TOOLS_CONVERTER_PARSER_CAFFE_CAFFE_ARGMAX_PARSER_H_ #include +#include "src/ops/primitive_c.h" #include "tools/converter/parser/caffe/caffe_node_parser.h" #include "tools/converter/parser/caffe/caffe_node_parser_registry.h" @@ -28,8 +29,8 @@ class CaffeArgMaxParser : public CaffeNodeParser { CaffeArgMaxParser() : CaffeNodeParser("argmax") {} ~CaffeArgMaxParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + lite::PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_batchnorm_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_batchnorm_parser.cc index 12b72ff730..65b9377045 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_batchnorm_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_batchnorm_parser.cc @@ -23,39 +23,24 @@ namespace mindspore { namespace lite { using STATUS = int; -STATUS CaffeBatchNormParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeBatchNormParser"; - if (weightVec == nullptr) { - MS_LOG(ERROR) << "weightVec is null"; - return RET_NULL_PTR; - } - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return 
RET_NULL_PTR; - } - +PrimitiveC *CaffeBatchNormParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } const caffe::BatchNormParameter &batchNormParam = proto.batch_norm_param(); if (proto.bottom_size() != 1) { MS_LOG(ERROR) << "Layer " << proto.name().c_str() << "bottom numbers is error, it must be 1, but is " << proto.bottom_size(); - return RET_ERROR; + return nullptr; } if (proto.top_size() != 1) { MS_LOG(ERROR) << "Layer " << proto.name().c_str() << "top numbers is error, it must be 1, but is " << proto.top_size(); - return RET_ERROR; + return nullptr; } if (batchNormParam.has_eps()) { @@ -69,40 +54,10 @@ STATUS CaffeBatchNormParser::Parse(const caffe::LayerParameter &proto, const caf attr->epsilon = 1e-5; } - const float blob2Data = - (weight.blobs(2).double_data_size() > 0) ? weight.blobs(2).double_data(0) : weight.blobs(2).data(0); - const float scaleFactor = blob2Data == 0 ? 
0 : 1 / blob2Data; - - auto gamma = ConvertWeight(weight.blobs(0)); - if (gamma == nullptr) { - MS_LOG(ERROR) << "Convert blobs(0) for layer " << weight.name().c_str() << " failed"; - return RET_ERROR; - } - auto estimatedMean = reinterpret_cast(gamma->data.data()); - auto estimatedMeanShapeSize = GetShapeSize(*gamma); - for (size_t i = 0; i < estimatedMeanShapeSize; i++) { - estimatedMean[i] = estimatedMean[i] * scaleFactor; - } - estimatedMean = nullptr; - weightVec->push_back(gamma); - - auto beta = ConvertWeight(weight.blobs(1)); - if (beta == nullptr) { - MS_LOG(ERROR) << "Convert blobs(1) for layer " << weight.name().c_str() << " failed"; - return RET_ERROR; - } - auto estimatedVariance = reinterpret_cast(beta->data.data()); - size_t estimatedVarianceShapeSize = GetShapeSize(*beta); - for (size_t i = 0; i < estimatedVarianceShapeSize; i++) { - estimatedVariance[i] = estimatedVariance[i] * scaleFactor; - } - estimatedVariance = nullptr; - weightVec->push_back(beta); - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_BatchNorm; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_BatchNorm; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeBatchNormParser("BatchNorm", new CaffeBatchNormParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_batchnorm_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_batchnorm_parser.h index 9079682697..c82487b6e0 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_batchnorm_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_batchnorm_parser.h @@ -18,6 +18,7 @@ #define MINDSPORE_LITE_TOOLS_CONVERTER_PARSER_CAFFE_CAFFE_BATCHNORM_PARSER_H_ #include +#include "src/ops/primitive_c.h" #include "tools/converter/parser/caffe/caffe_node_parser.h" #include 
"tools/converter/parser/caffe/caffe_node_parser_registry.h" @@ -28,8 +29,7 @@ class CaffeBatchNormParser : public CaffeNodeParser { CaffeBatchNormParser() : CaffeNodeParser("batchnorm") {} ~CaffeBatchNormParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_concat_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_concat_parser.cc index 3df92446f6..3201b81333 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_concat_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_concat_parser.cc @@ -19,29 +19,18 @@ namespace mindspore { namespace lite { -STATUS CaffeConcatParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeConcatParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeConcatParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } const caffe::ConcatParameter &concatParam = proto.concat_param(); if (concatParam.has_axis() && concatParam.has_concat_dim()) { MS_LOG(ERROR) << "Concat param in caffe have concat_dim and axis simultaneously, return fail"; - return RET_ERROR; + return nullptr; } if (concatParam.has_concat_dim()) { @@ -49,7 +38,7 @@ STATUS CaffeConcatParser::Parse(const 
caffe::LayerParameter &proto, const caffe: auto concat_dim_value = (int32_t)concatParam.concat_dim(); if (concat_dim_value < 0) { MS_LOG(ERROR) << "concat_dim value in model is smaller than 0:" << concat_dim_value; - return RET_ERROR; + return nullptr; } attr->axis = concat_dim_value; } else if (concatParam.has_axis()) { @@ -60,11 +49,12 @@ STATUS CaffeConcatParser::Parse(const caffe::LayerParameter &proto, const caffe: MS_LOG(DEBUG) << "by default, set axis = 1"; attr->axis = 1; } + attr->n = proto.bottom_size(); - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Concat; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Concat; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeConcatParser("Concat", new CaffeConcatParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_concat_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_concat_parser.h index c19f96f0d6..769b3eddb2 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_concat_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_concat_parser.h @@ -21,17 +21,14 @@ #include "tools/converter/parser/caffe/caffe_node_parser.h" #include "tools/converter/parser/caffe/caffe_node_parser_registry.h" -namespace mindspore { -namespace lite { +namespace mindspore::lite { class CaffeConcatParser : public CaffeNodeParser { public: CaffeConcatParser() : CaffeNodeParser("concat") {} ~CaffeConcatParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; -} // namespace lite -} // namespace mindspore +} // namespace mindspore::lite #endif // 
MINDSPORE_LITE_TOOLS_CONVERTER_PARSER_CAFFE_CAFFE_CONCAT_PARSER_H_ diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_converter.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_converter.cc index cd4dea78ed..a63d5602f4 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_converter.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_converter.cc @@ -15,9 +15,8 @@ */ #include "tools/converter/parser/caffe/caffe_converter.h" +#include "tools/converter/parser/caffe/caffe_model_parser.h" -namespace mindspore { -namespace lite { +namespace mindspore::lite { CaffeConverter::CaffeConverter() { modelParser = new CaffeModelParser(); } -} // namespace lite -} // namespace mindspore +} // namespace mindspore::lite diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_converter.h b/mindspore/lite/tools/converter/parser/caffe/caffe_converter.h index 93a6f7ffbf..0c0367b32c 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_converter.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_converter.h @@ -20,7 +20,6 @@ #include #include #include "tools/converter/converter.h" -#include "tools/converter/parser/caffe/caffe_model_parser.h" #include "tools/converter/graphdef_transform.h" namespace mindspore::lite { diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_convolution_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_convolution_parser.cc index 0e8149980e..c21d500b3e 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_convolution_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_convolution_parser.cc @@ -19,7 +19,7 @@ namespace mindspore { namespace lite { -STATUS CaffeConvolutionParser::ParseGroupConvolution(schema::CNodeT *op, schema::Conv2DT *attr) { +STATUS CaffeConvolutionParser::ParseGroupConvolution(schema::PrimitiveT *primitiveT, schema::Conv2DT *attr) { if (attr->group == 1) { return RET_OK; } @@ -46,32 +46,17 @@ STATUS 
CaffeConvolutionParser::ParseGroupConvolution(schema::CNodeT *op, schema: depthwiseConv2DParam->hasBias = attr->hasBias; depthwiseConv2DParam->activationType = attr->activationType; delete attr; - op->primitive->value.type = schema::PrimitiveType_DepthwiseConv2D; - op->primitive->value.value = depthwiseConv2DParam.release(); + primitiveT->value.type = schema::PrimitiveType_DepthwiseConv2D; + primitiveT->value.value = depthwiseConv2DParam.release(); return RET_OK; } -STATUS CaffeConvolutionParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeConvolutionParser"; - if (weightVec == nullptr) { - MS_LOG(ERROR) << "weightVec is null"; - return RET_NULL_PTR; - } - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeConvolutionParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { auto attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new attr failed"; - return RET_NULL_PTR; + return nullptr; } attr->format = schema::Format_NCHW; @@ -82,7 +67,7 @@ STATUS CaffeConvolutionParser::Parse(const caffe::LayerParameter &proto, const c auto status = CaffeConvBaseParser::ParsePads(convParam, &pad); if (status != RET_OK) { MS_LOG(ERROR) << "ParsePads for " << proto.name().c_str() << " failed"; - return RET_ERROR; + return nullptr; } attr->padUp = pad[0]; attr->padDown = pad[1]; @@ -94,7 +79,7 @@ STATUS CaffeConvolutionParser::Parse(const caffe::LayerParameter &proto, const c status = CaffeConvBaseParser::ParseStrides(convParam, &stride); if (status != RET_OK) { MS_LOG(ERROR) << "ParseStrides for " << proto.name().c_str() << " failed"; - return RET_ERROR; + return nullptr; } attr->strideH = stride[0]; attr->strideW = 
stride[1]; @@ -104,7 +89,7 @@ STATUS CaffeConvolutionParser::Parse(const caffe::LayerParameter &proto, const c status = CaffeConvBaseParser::ParseDilations(convParam, &dilation); if (status != RET_OK) { MS_LOG(ERROR) << "ParseDilations for " << proto.name().c_str() << " failed"; - return RET_ERROR; + return nullptr; } attr->dilateH = dilation[0]; attr->dilateW = dilation[1]; @@ -114,7 +99,7 @@ STATUS CaffeConvolutionParser::Parse(const caffe::LayerParameter &proto, const c status = CaffeConvBaseParser::ParseKernels(convParam, &kernel); if (status != RET_OK) { MS_LOG(ERROR) << "ParseKernels for " << proto.name().c_str() << " failed"; - return RET_ERROR; + return nullptr; } attr->kernelH = kernel[0]; attr->kernelW = kernel[1]; @@ -124,7 +109,7 @@ STATUS CaffeConvolutionParser::Parse(const caffe::LayerParameter &proto, const c auto ret = CaffeConvBaseParser::ParseChannelOut(convParam, &(attr->channelOut)); if (ret != RET_OK) { MS_LOG(ERROR) << "conv channel out failed"; - return RET_ERROR; + return nullptr; } auto &weightBlob = weight.blobs(0); if (weightBlob.has_shape()) { @@ -134,23 +119,17 @@ STATUS CaffeConvolutionParser::Parse(const caffe::LayerParameter &proto, const c } attr->padMode = schema::PadMode_CAFFE; - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Conv2D; - op->primitive->value.value = attr.release(); + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Conv2D; + primitive->value.value = attr.release(); - status = ParseGroupConvolution(op, static_cast(op->primitive->value.value)); + status = ParseGroupConvolution(primitive.get(), static_cast(primitive->value.value)); if (status != RET_OK) { MS_LOG(ERROR) << "Parse group convolution failed"; - return RET_ERROR; - } - - status = CaffeConvBaseParser::ParseWeight(weight, weightVec); - if (status != RET_OK) { - MS_LOG(ERROR) << "ParseWeight for " << proto.name().c_str() << " failed"; - return RET_ERROR; + return nullptr; } - return status; + 
return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeConvolutionParser("Convolution", new CaffeConvolutionParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_convolution_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_convolution_parser.h index 50a411b345..19cb6eab28 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_convolution_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_convolution_parser.h @@ -29,11 +29,10 @@ class CaffeConvolutionParser : public CaffeNodeParser { CaffeConvolutionParser() : CaffeNodeParser("convolution") {} ~CaffeConvolutionParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; private: - static STATUS ParseGroupConvolution(schema::CNodeT *op, schema::Conv2DT *attr); + static STATUS ParseGroupConvolution(schema::PrimitiveT *primitiveT, schema::Conv2DT *attr); }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_crop_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_crop_parser.cc index e36d1e2059..53962956e7 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_crop_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_crop_parser.cc @@ -19,27 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffeCropParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeCropParser"; - if (weightVec == nullptr) { - MS_LOG(ERROR) << "weightVec is null"; - return RET_NULL_PTR; - } - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - 
MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeCropParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } if (!proto.has_crop_param()) { @@ -66,11 +51,10 @@ STATUS CaffeCropParser::Parse(const caffe::LayerParameter &proto, const caffe::L attr->offsets = offsets; } } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Crop; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Crop; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeCropParser("Crop", new CaffeCropParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_crop_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_crop_parser.h index 8714070431..69194ec13b 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_crop_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_crop_parser.h @@ -28,8 +28,7 @@ class CaffeCropParser : public CaffeNodeParser { CaffeCropParser() : CaffeNodeParser("crop") {} ~CaffeCropParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_deconvolution_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_deconvolution_parser.cc index 5f6325f5d5..4e8d621775 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_deconvolution_parser.cc +++ 
b/mindspore/lite/tools/converter/parser/caffe/caffe_deconvolution_parser.cc @@ -19,7 +19,7 @@ namespace mindspore { namespace lite { -STATUS CaffeDeconvolutionParser::ParseGroupDeconvolution(schema::CNodeT *op, schema::DeConv2DT *attr) { +STATUS CaffeDeconvolutionParser::ParseGroupDeconvolution(schema::PrimitiveT *primitive, schema::DeConv2DT *attr) { if (attr->group == 1) { return RET_OK; } @@ -46,28 +46,13 @@ STATUS CaffeDeconvolutionParser::ParseGroupDeconvolution(schema::CNodeT *op, sch deDepthwiseConv2DParam->hasBias = attr->hasBias; deDepthwiseConv2DParam->activationType = attr->activationType; delete attr; - op->primitive->value.type = schema::PrimitiveType_DeDepthwiseConv2D; - op->primitive->value.value = deDepthwiseConv2DParam.release(); + primitive->value.type = schema::PrimitiveType_DeDepthwiseConv2D; + primitive->value.value = deDepthwiseConv2DParam.release(); return RET_OK; } -STATUS CaffeDeconvolutionParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeDeconvolutionParser"; - if (weightVec == nullptr) { - MS_LOG(ERROR) << "weightVec is null"; - return RET_NULL_PTR; - } - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeDeconvolutionParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr(new (std::nothrow) schema::DeConv2DT()); attr->format = schema::Format::Format_NCHW; @@ -78,7 +63,7 @@ STATUS CaffeDeconvolutionParser::Parse(const caffe::LayerParameter &proto, const auto status = CaffeConvBaseParser::ParsePads(convParam, &pad); if (status != RET_OK) { MS_LOG(ERROR) << "ParsePads for " << proto.name().c_str() << " failed"; - return RET_ERROR; + return nullptr; } attr->padUp = 
pad[0]; attr->padDown = pad[1]; @@ -90,7 +75,7 @@ STATUS CaffeDeconvolutionParser::Parse(const caffe::LayerParameter &proto, const status = CaffeConvBaseParser::ParseStrides(convParam, &stride); if (status != RET_OK) { MS_LOG(ERROR) << "ParseStrides for " << proto.name().c_str() << " failed"; - return RET_ERROR; + return nullptr; } attr->strideH = stride[0]; attr->strideW = stride[1]; @@ -100,7 +85,7 @@ STATUS CaffeDeconvolutionParser::Parse(const caffe::LayerParameter &proto, const status = CaffeConvBaseParser::ParseDilations(convParam, &dilation); if (status != RET_OK) { MS_LOG(ERROR) << "ParseDilations for " << proto.name().c_str() << " failed"; - return RET_ERROR; + return nullptr; } attr->dilateH = dilation[0]; attr->dilateW = dilation[1]; @@ -110,7 +95,7 @@ STATUS CaffeDeconvolutionParser::Parse(const caffe::LayerParameter &proto, const status = CaffeConvBaseParser::ParseKernels(convParam, &kernel); if (status != RET_OK) { MS_LOG(ERROR) << "ParseKernels for " << proto.name().c_str() << " failed"; - return RET_ERROR; + return nullptr; } attr->kernelH = kernel[0]; attr->kernelW = kernel[1]; @@ -120,7 +105,7 @@ STATUS CaffeDeconvolutionParser::Parse(const caffe::LayerParameter &proto, const auto ret = CaffeConvBaseParser::ParseChannelOut(convParam, &(attr->channelOut)); if (ret != RET_OK) { MS_LOG(ERROR) << "deconv channel get failed"; - return RET_ERROR; + return nullptr; } auto &weightBlob = weight.blobs(0); if (weightBlob.has_shape()) { @@ -132,24 +117,16 @@ STATUS CaffeDeconvolutionParser::Parse(const caffe::LayerParameter &proto, const attr->channelIn = weightBlob.num() * attr->group; } attr->padMode = schema::PadMode_CAFFE; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_DeConv2D; + primitive->value.value = attr.release(); - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_DeConv2D; - op->primitive->value.value = attr.get(); - - status = ParseGroupDeconvolution(op, attr.release()); + status 
= ParseGroupDeconvolution(primitive.get(), primitive->value.AsDeConv2D()); if (status != RET_OK) { MS_LOG(ERROR) << "Parse group deconvolution failed"; - return RET_ERROR; - } - - status = CaffeConvBaseParser::ParseWeight(weight, weightVec); - if (status != RET_OK) { - MS_LOG(ERROR) << "ParseWeight for " << proto.name().c_str() << " failed"; - return RET_ERROR; + return nullptr; } - - return status; + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeDeconvolutionParser("Deconvolution", new CaffeDeconvolutionParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_deconvolution_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_deconvolution_parser.h index 56707f75da..53136419df 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_deconvolution_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_deconvolution_parser.h @@ -29,11 +29,10 @@ class CaffeDeconvolutionParser : public CaffeNodeParser { CaffeDeconvolutionParser() : CaffeNodeParser("deconvolution") {} ~CaffeDeconvolutionParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; private: - static STATUS ParseGroupDeconvolution(schema::CNodeT *op, schema::DeConv2DT *attr); + static STATUS ParseGroupDeconvolution(schema::PrimitiveT *primitive, schema::DeConv2DT *attr); }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_eltwise_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_eltwise_parser.cc index 041bb7cdcf..bb37506265 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_eltwise_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_eltwise_parser.cc @@ -20,47 +20,36 @@ namespace mindspore { namespace lite { -STATUS 
CaffeEltwiseParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeEltwiseParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeEltwiseParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } if (proto.bottom_size() < 2) { MS_LOG(ERROR) << "Eltwise Op " << proto.name() << " need at least 2 inputs,but input size is " << proto.bottom_size(); - return RET_ERROR; + return nullptr; } const caffe::EltwiseParameter &eltwiseParam = proto.eltwise_param(); if (eltwiseParam.coeff_size() != 0 && eltwiseParam.coeff_size() != proto.bottom_size()) { MS_LOG(ERROR) << "Coeff size(" << eltwiseParam.coeff_size() << ") check fail, Eltwise Layer takes one coefficient per bottom blob."; - return RET_ERROR; + return nullptr; } if (eltwiseParam.operation() == caffe::EltwiseParameter::PROD && eltwiseParam.coeff_size() != 0) { MS_LOG(ERROR) << "Eltwise layer only takes coefficients for summation."; - return RET_ERROR; + return nullptr; } if (eltwiseParam.coeff_size() != 0 && (std::fabs(eltwiseParam.coeff(0) - 1) > 1e-5 || std::fabs(eltwiseParam.coeff(1) - 1) > 1e-5)) { MS_LOG(ERROR) << "Eltwise only support coefficient 1 for summation now."; - return RET_ERROR; + return nullptr; } if (proto.has_eltwise_param() && eltwiseParam.has_operation()) { @@ -76,16 +65,15 @@ STATUS CaffeEltwiseParser::Parse(const caffe::LayerParameter &proto, const caffe break; default: MS_LOG(ERROR) << "Eltwise parse params fail, unsupported opration: " << eltwiseParam.operation(); - return RET_ERROR; + return nullptr; } } 
else { attr->mode = schema::EltwiseMode_SUM; } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Eltwise; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Eltwise; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeEltwiseParser("Eltwise", new CaffeEltwiseParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_eltwise_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_eltwise_parser.h index c210666fcb..126aa921d9 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_eltwise_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_eltwise_parser.h @@ -28,8 +28,7 @@ class CaffeEltwiseParser : public CaffeNodeParser { CaffeEltwiseParser() : CaffeNodeParser("eltwise") {} ~CaffeEltwiseParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_elu_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_elu_parser.cc index 6e70ec6583..d7ab4d5ee6 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_elu_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_elu_parser.cc @@ -19,23 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffeEluParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeEluParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) 
{ - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeEluParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } if (proto.has_elu_param()) { @@ -44,11 +33,10 @@ STATUS CaffeEluParser::Parse(const caffe::LayerParameter &proto, const caffe::La attr->alpha = eluParameter.alpha(); } } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Elu; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Elu; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeEluParser("ELU", new CaffeEluParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_elu_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_elu_parser.h index fc544b57f3..d9757c4ac3 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_elu_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_elu_parser.h @@ -28,8 +28,7 @@ class CaffeEluParser : public CaffeNodeParser { CaffeEluParser() : CaffeNodeParser("elu") {} ~CaffeEluParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_exp_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_exp_parser.cc index e23a52104e..c0cf5c8ff8 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_exp_parser.cc +++ 
b/mindspore/lite/tools/converter/parser/caffe/caffe_exp_parser.cc @@ -20,23 +20,12 @@ namespace mindspore { namespace lite { -STATUS CaffeExpParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse ExpParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeExpParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } const caffe::ExpParameter &exp_param = proto.exp_param(); @@ -55,10 +44,10 @@ STATUS CaffeExpParser::Parse(const caffe::LayerParameter &proto, const caffe::La } else { attr->shift = 0; } - - op->primitive->value.type = schema::PrimitiveType_Exp; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Exp; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeExpParser("Exp", new CaffeExpParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_exp_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_exp_parser.h index 940e59bde0..9e8ba424bf 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_exp_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_exp_parser.h @@ -28,8 +28,7 @@ class CaffeExpParser : public CaffeNodeParser { CaffeExpParser() : CaffeNodeParser("exp") {} ~CaffeExpParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC 
*ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_flatten_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_flatten_parser.cc index 517588120b..78263fe24a 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_flatten_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_flatten_parser.cc @@ -19,29 +19,17 @@ namespace mindspore { namespace lite { -STATUS CaffeFlattenParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeFlattenParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeFlattenParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Flatten; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Flatten; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_CaffeFlattenParser("Flatten", new CaffeFlattenParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_flatten_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_flatten_parser.h index 60668a41b6..71f79f6643 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_flatten_parser.h +++ 
b/mindspore/lite/tools/converter/parser/caffe/caffe_flatten_parser.h @@ -28,8 +28,7 @@ class CaffeFlattenParser : public CaffeNodeParser { CaffeFlattenParser() : CaffeNodeParser("flatten") {} ~CaffeFlattenParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_innerproduct_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_innerproduct_parser.cc index 8c5fd191a6..8ea77c35e7 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_innerproduct_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_innerproduct_parser.cc @@ -19,33 +19,18 @@ namespace mindspore { namespace lite { -STATUS CaffeInnerProductParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeInnerProductParser"; - if (weightVec == nullptr) { - MS_LOG(ERROR) << "weightVec is null"; - return RET_NULL_PTR; - } - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeInnerProductParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } const caffe::InnerProductParameter &innerProductParam = proto.inner_product_param(); if (!innerProductParam.has_num_output()) { MS_LOG(ERROR) << "InnerProduct Parse num_output for " << proto.name().c_str() << " failed."; - return 
RET_ERROR; + return nullptr; } if (innerProductParam.axis() == 1) { @@ -53,40 +38,17 @@ STATUS CaffeInnerProductParser::Parse(const caffe::LayerParameter &proto, const attr->useAxis = true; } else { MS_LOG(ERROR) << "InnerProduct Parse axis only support default 1, but actually " << innerProductParam.axis(); - return RET_ERROR; + return nullptr; } if (innerProductParam.bias_term()) { attr->hasBias = true; } attr->activationType = schema::ActivationType_NO_ACTIVATION; - - // parse weight - if (weight.blobs_size() == 0) { - MS_LOG(ERROR) << "InnerProduct No filter data in layer " << weight.name().c_str(); - return RET_ERROR; - } - auto filter = ConvertWeight(weight.blobs(0)); - if (filter == nullptr) { - MS_LOG(ERROR) << "InnerProduct parse weight for layer " << weight.name().c_str() << " failed"; - return RET_ERROR; - } - weightVec->push_back(filter); - - // parse bias - if (innerProductParam.bias_term() && weight.blobs_size() > 1) { - auto bias = ConvertWeight(weight.blobs(1)); - if (bias == nullptr) { - MS_LOG(ERROR) << "InnerProduct parse bias for layer " << weight.name().c_str() << " failed"; - return RET_ERROR; - } - weightVec->push_back(bias); - } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_FullConnection; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_FullConnection; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeInnerProductParser("InnerProduct", new CaffeInnerProductParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_innerproduct_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_innerproduct_parser.h index dd06c42ae5..298f81a7d6 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_innerproduct_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_innerproduct_parser.h @@ -28,8 +28,7 @@ class 
CaffeInnerProductParser : public CaffeNodeParser { CaffeInnerProductParser() : CaffeNodeParser("innerproduct") {} ~CaffeInnerProductParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_inspector.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_inspector.cc index 42bf574c89..fabbe6ffa2 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_inspector.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_inspector.cc @@ -29,9 +29,9 @@ STATUS CaffeInspector::InspectModel(const caffe::NetParameter &proto) { ParseInput(); - SetTopsAndBottoms(); + SetLayerTopsAndBottoms(); - FindInputAndOutput(); + FindGraphInputsAndOutputs(); return RET_OK; } @@ -46,7 +46,7 @@ STATUS CaffeInspector::ParseInput() { return RET_OK; } -STATUS CaffeInspector::FindInputAndOutput() { +STATUS CaffeInspector::FindGraphInputsAndOutputs() { for (const auto &iter : layerBottoms) { if (layerTops.find(iter) == layerTops.end()) { graphInput.insert(iter); @@ -60,7 +60,7 @@ STATUS CaffeInspector::FindInputAndOutput() { return RET_OK; } -STATUS CaffeInspector::SetTopsAndBottoms() { +STATUS CaffeInspector::SetLayerTopsAndBottoms() { for (int32_t i = 0; i < net.layer_size(); i++) { auto &layer = const_cast(net.layer(i)); if (layer.top_size() == 1 && layer.bottom_size() == 1 && layer.top(0) == layer.bottom(0)) { diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_inspector.h b/mindspore/lite/tools/converter/parser/caffe/caffe_inspector.h index 5488f6d2e9..bb2a6dffee 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_inspector.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_inspector.h @@ -33,8 +33,8 @@ class 
CaffeInspector { STATUS InspectModel(const caffe::NetParameter &proto); STATUS ParseInput(); - STATUS FindInputAndOutput(); - STATUS SetTopsAndBottoms(); + STATUS FindGraphInputsAndOutputs(); + STATUS SetLayerTopsAndBottoms(); std::set GetGraphInput() { return graphInput; } std::set GetGraphOutput() { return graphOutput; } diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_interp_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_interp_parser.cc index d607002e16..2ea5774907 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_interp_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_interp_parser.cc @@ -19,23 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffeInterpParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeInterpParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeInterpParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } const caffe::InterpParameter &interpParam = proto.interp_param(); @@ -43,7 +32,7 @@ STATUS CaffeInterpParser::Parse(const caffe::LayerParameter &proto, const caffe: int64_t height = interpParam.height(); if (height < 0) { MS_LOG(ERROR) << "Interp height must be > 0"; - return RET_ERROR; + return nullptr; } attr->newHeight = height; } @@ -52,17 +41,16 @@ STATUS CaffeInterpParser::Parse(const caffe::LayerParameter &proto, const caffe: int64_t width = interpParam.width(); if (width < 0) { MS_LOG(ERROR) << "Interp width must be > 0"; - return RET_ERROR; + return nullptr; } 
attr->newWidth = width; } attr->alignCorners = true; attr->method = schema::ResizeMethod_LINEAR; - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Resize; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Resize; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeInterpParser("Interp", new CaffeInterpParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_interp_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_interp_parser.h index decf497470..bdaaa170c1 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_interp_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_interp_parser.h @@ -28,8 +28,7 @@ class CaffeInterpParser : public CaffeNodeParser { CaffeInterpParser() : CaffeNodeParser("Interp") {} ~CaffeInterpParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_model_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_model_parser.cc index 926b1c33eb..a917fae399 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_model_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_model_parser.cc @@ -13,299 +13,419 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - #include "tools/converter/parser/caffe/caffe_model_parser.h" #include #include -#include +#include +#include #include "tools/converter/parser/caffe/caffe_node_parser_registry.h" #include "tools/converter/parser/caffe/caffe_inspector.h" #include "tools/common/graph_util.h" #include "tools/common/protobuf_utils.h" +#include "src/param_value_lite.h" -namespace mindspore { -namespace lite { -CaffeModelParser::CaffeModelParser() {} - -CaffeModelParser::~CaffeModelParser() {} +namespace mindspore::lite { +CaffeModelParser::CaffeModelParser() = default; -const std::set CaffeModelParser::skipedLayerType = {"Dropout"}; +CaffeModelParser::~CaffeModelParser() = default; -schema::MetaGraphT *CaffeModelParser::ParseToFb(const std::string &model_file, const std::string &weight_file, - const QuantType &quant_type) { - int status = ValidateFileStr(model_file, ".prototxt"); +FuncGraphPtr CaffeModelParser::Parse(const std::string &model_file, const std::string &weight_file, + const QuantType &quant_type) { + STATUS status = InitOriginModel(model_file, weight_file); if (status != RET_OK) { - MS_LOG(ERROR) << "INPUT ILLEGAL: modelFile must be *.prototxt"; ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); return nullptr; } - - if (weight_file.empty()) { - MS_LOG(ERROR) << "INPUT MISSING: weightFile is necessary"; - ReturnCode::GetSingleReturnCode()->UpdateReturnCode(RET_GRAPH_FILE_ERR); + func_graph_ptr_ = std::make_shared(); + status = ConvertGraphInputs(); + if (status != RET_OK) { + ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); return nullptr; } - status = ValidateFileStr(weight_file, ".caffemodel"); + status = ConvertLayers(); if (status != RET_OK) { - MS_LOG(ERROR) << "INPUT ILLEGAL: weightFile must be *.caffemodel"; ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); return nullptr; } - auto metaGraph = std::make_unique(); - TensorCache tensorCache; - - caffe::NetParameter proto; - status = ReadProtoFromText((const char 
*)model_file.c_str(), &proto); + status = ConvertGraphOutputs(); if (status != RET_OK) { - MS_LOG(ERROR) << "Read prototxt file failed, model path: " << model_file; ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); return nullptr; } - metaGraph->name = proto.name(); + return func_graph_ptr_; +} + +STATUS CaffeModelParser::ConvertLayers() { + STATUS status = RET_OK; + std::map weight_layers; + for (int i = 0; i < caffe_weight_.layer_size(); i++) { + auto weight_layer = caffe_weight_.layer(i); + weight_layers[weight_layer.name()] = weight_layer; + } + for (int i = 0; i < caffe_model_.layer_size(); i++) { + auto layer = caffe_model_.layer(i); + caffe::LayerParameter weight; + if (weight_layers.find(layer.name()) != weight_layers.end()) { + weight = weight_layers.find(layer.name())->second; + } + + if (IsSkipedLayer(layer)) { + continue; + } + + // parse primitive + auto node_parser = CaffeNodeParserRegistry::GetInstance()->GetNodeParser(layer.type()); + if (node_parser == nullptr) { + NoSupportOp::GetInstance()->InsertOp(layer.type()); + status = (status == RET_OK ? 
RET_NOT_FIND_OP : status); + continue; + } + + if (status != RET_OK) { + continue; + } + + auto primitive_c = node_parser->ParseLitePrimitive(layer, weight); + if (primitive_c == nullptr) { + MS_LOG(ERROR) << "parse node " << layer.name() << " failed."; + continue; + } + + // build inputs + std::vector input_nodes; + status = ConvertBottom(layer, &input_nodes); + if (status != RET_OK) { + MS_LOG(ERROR) << "Convert layer bottom for " << layer.name() << " failed."; + ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); + return status; + } + + // build weights + std::vector const_parameters; + status = ConvertBlobs(weight, &const_parameters); + if (status != RET_OK) { + MS_LOG(ERROR) << "Convert blobs for " << layer.name() << " failed."; + ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); + return status; + } + + // build cnode + std::vector op_inputs = {NewValueNode(std::shared_ptr(primitive_c))}; + op_inputs.insert(op_inputs.end(), input_nodes.begin(), input_nodes.end()); + op_inputs.insert(op_inputs.end(), const_parameters.begin(), const_parameters.end()); + auto new_cnode = func_graph_ptr_->NewCNode(op_inputs); + new_cnode->set_fullname_with_scope(layer.name()); + + // convert outputs + status = ConvertTop(layer, new_cnode); + if (status != RET_OK) { + MS_LOG(ERROR) << "Convert outputs for " << layer.name() << " failed."; + ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); + return status; + } + + status = ConvertLayerQuantParams(layer, weight, primitive_c); + if (status != RET_OK) { + MS_LOG(ERROR) << "Convert quant params for " << layer.name() << " failed."; + ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); + return status; + } + } + return status; +} - caffe::NetParameter weight; - status = ReadProtoFromBinaryFile((const char *)weight_file.c_str(), &weight); +STATUS CaffeModelParser::InitOriginModel(const std::string &model_file, const std::string &weight_file) { + int status = ValidateFileStr(model_file, 
".prototxt"); if (status != RET_OK) { - MS_LOG(ERROR) << "Read caffemodel file failed, model path: " << weight_file; + MS_LOG(ERROR) << "INPUT ILLEGAL: modelFile must be *.prototxt"; ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); - return nullptr; + return RET_INPUT_PARAM_INVALID; + } + + if (weight_file.empty()) { + MS_LOG(ERROR) << "INPUT MISSING: weightFile is necessary"; + ReturnCode::GetSingleReturnCode()->UpdateReturnCode(RET_GRAPH_FILE_ERR); + return RET_INPUT_PARAM_INVALID; } - status = GetModelInput(proto, &tensorCache); + status = ValidateFileStr(weight_file, ".caffemodel"); if (status != RET_OK) { - MS_LOG(ERROR) << "GetModelInput failed " << status; + MS_LOG(ERROR) << "INPUT ILLEGAL: weightFile must be *.caffemodel"; ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); - return nullptr; + return RET_INPUT_PARAM_INVALID; } - NoSupportOp::GetInstance()->SetFmkType("CAFFE"); - status = ParseLayer(proto, weight, &tensorCache, metaGraph.get(), quant_type); + status = ReadProtoFromText((const char *)model_file.c_str(), &caffe_model_); if (status != RET_OK) { - MS_LOG(ERROR) << "ParseLayer failed " << status; + MS_LOG(ERROR) << "Read prototxt file failed, model path: " << model_file; ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); - for (auto &tensor : tensorCache.GetCachedTensor()) { - delete tensor; - } - return nullptr; + return RET_ERROR; } - status = SetGraphTensorIndex(proto, &tensorCache, metaGraph.get()); + status = ReadProtoFromBinaryFile((const char *)weight_file.c_str(), &caffe_weight_); if (status != RET_OK) { - MS_LOG(ERROR) << "Set inputTensor index and outputTensor index for graph failed!"; + MS_LOG(ERROR) << "Read caffemodel file failed, model path: " << weight_file; ReturnCode::GetSingleReturnCode()->UpdateReturnCode(status); - return nullptr; + return RET_ERROR; } - metaGraph->name = GetModelName(model_file); - - SetAllTensors(tensorCache, metaGraph.get()); - - return metaGraph.release(); + return RET_OK; } 
-STATUS CaffeModelParser::SetOpInputIdx(const caffe::LayerParameter &layer, schema::CNodeT *op, - TensorCache *tensorCache) { - for (int i = 0; i < layer.bottom_size(); i++) { - int index = -1; - if (splitLayer.find(layer.bottom(i)) != splitLayer.end()) { - index = tensorCache->FindTensor(splitLayer.find(layer.bottom(i))->second); - } else { - index = tensorCache->FindTensor(layer.bottom(i)); - } - if (index >= 0) { - op->inputIndex.emplace_back(index); - } else { - MS_LOG(ERROR) << "Can't find input layer for " << layer.name().c_str(); - return RET_ERROR; +STATUS CaffeModelParser::ConvertGraphInputs() { + for (int i = 0; i < caffe_model_.layer_size(); i++) { + auto layer = caffe_model_.layer(i); + if (layer.type() == "Input") { + auto parameter = func_graph_ptr_->add_parameter(); + std::vector shape; + for (int j = 0; j < layer.input_param().shape(0).dim_size(); j++) { + shape.push_back(layer.input_param().shape(0).dim(j)); + } + auto type_ptr = TypeIdToType(TypeId::kNumberTypeFloat32); + auto abstract_tensor = std::make_shared(type_ptr, shape); + parameter->set_abstract(abstract_tensor); + parameter->set_name("graph-input-" + std::to_string(i)); + nodes_.insert(std::pair(layer.top(0), parameter)); + return RET_OK; } } - return RET_OK; -} -STATUS CaffeModelParser::SetOpOutputIdx(const caffe::LayerParameter &layer, schema::CNodeT *op, - TensorCache *tensorCache) { - for (int i = 0; i < layer.top_size(); i++) { - std::unique_ptr msTensor = std::make_unique(); - op->outputIndex.emplace_back(tensorCache->AddTensor(layer.top(i), msTensor.release(), OP_OUTPUT)); + if (caffe_model_.input_dim_size() > 0) { + for (int i = 0; i < caffe_model_.input_size(); i++) { + std::vector shape; + if (caffe_model_.input_dim_size() > 4) { + int step = caffe_model_.input_dim_size() / caffe_model_.input_size(); + for (int j = i * step; j < (i + 1) * step; j++) { + shape.push_back(caffe_model_.input_dim(j)); + } + } else { + for (int j = 0; j < caffe_model_.input_dim_size(); j++) { + 
shape.push_back(caffe_model_.input_dim(j)); + } + } + auto parameter = func_graph_ptr_->add_parameter(); + auto type_ptr = TypeIdToType(TypeId::kNumberTypeFloat32); + auto abstract_tensor = std::make_shared(type_ptr, shape); + parameter->set_abstract(abstract_tensor); + parameter->set_name("graph-input-" + caffe_model_.input(i)); + nodes_.insert(std::pair(caffe_model_.input(i), parameter)); + } + } else { + for (int i = 0; i < caffe_model_.input_shape_size(); i++) { + auto shape = caffe_model_.input_shape(i); + std::vector shape_vector; + for (int j = 0; j < shape.dim_size(); j++) { + shape_vector.push_back(shape.dim(j)); + } + auto parameter = func_graph_ptr_->add_parameter(); + auto type_ptr = TypeIdToType(TypeId::kNumberTypeFloat32); + auto abstract_tensor = std::make_shared(type_ptr, shape_vector); + parameter->set_abstract(abstract_tensor); + parameter->set_name("graph-input-" + caffe_model_.input(i)); + nodes_.insert(std::pair(caffe_model_.input(i), parameter)); + } } return RET_OK; } -STATUS CaffeModelParser::SetWeightTensor(const std::vector &weightVec, schema::CNodeT *op, - TensorCache *tensorCache) { - for (auto iter : weightVec) { - op->inputIndex.emplace_back(tensorCache->AddTensor("Weight", iter, CONST)); +STATUS CaffeModelParser::ConvertGraphOutputs() { + CaffeInspector caffeInspector; + caffeInspector.InspectModel(caffe_model_); + if (caffeInspector.GetGraphOutput().size() > 1) { + std::vector make_tuple_inputs; + auto make_tuple_prim_ptr = GetMakeTuplePrim(); + if (make_tuple_prim_ptr == nullptr) { + MS_LOG(ERROR) << "GetMakeTuplePrim return nullptr"; + return RET_NULL_PTR; + } + auto make_tuple_prim = NewValueNode(make_tuple_prim_ptr); + make_tuple_inputs.emplace_back(make_tuple_prim); + for (const auto &output_node : caffeInspector.GetGraphOutput()) { + if (nodes_.find(output_node) == nodes_.end()) { + MS_LOG(ERROR) << "Can't find input node."; + return RET_NOT_FIND_OP; + } + auto cnode = nodes_.find(output_node)->second; + 
make_tuple_inputs.emplace_back(cnode); + } + auto make_tuple_cnode = func_graph_ptr_->NewCNode(make_tuple_inputs); + make_tuple_cnode->set_fullname_with_scope("return tuple"); + + std::vector op_inputs; + auto return_prim_ptr = GetReturnPrim(); + if (return_prim_ptr == nullptr) { + MS_LOG(ERROR) << "GetReturnPrim return nullptr"; + return RET_NULL_PTR; + } + auto value_node = NewValueNode(return_prim_ptr); + op_inputs.emplace_back(value_node); + op_inputs.emplace_back(make_tuple_cnode); + auto cnode = func_graph_ptr_->NewCNode(op_inputs); + cnode->set_fullname_with_scope("return"); + func_graph_ptr_->set_return(cnode); + } else { + auto returnPrim = GetReturnPrim(); + if (returnPrim == nullptr) { + MS_LOG(ERROR) << "GetReturnPrim return nullptr"; + return RET_NULL_PTR; + } + auto valueNode = NewValueNode(returnPrim); + std::vector opInputs{valueNode}; + if (nodes_.find(*caffeInspector.GetGraphOutput().begin()) == nodes_.end()) { + MS_LOG(ERROR) << "Can't find input node."; + return RET_NOT_FIND_OP; + } + auto cnode = nodes_.find(*caffeInspector.GetGraphOutput().begin())->second; + if (nullptr == cnode) { + MS_LOG(ERROR) << "Can't find input node."; + return RET_NOT_FIND_OP; + } + opInputs.emplace_back(cnode); + auto returnCnode = func_graph_ptr_->NewCNode(opInputs); + returnCnode->set_fullname_with_scope("return"); + func_graph_ptr_->set_return(returnCnode); } return RET_OK; } -STATUS CaffeModelParser::SetAllTensors(const TensorCache &tensorCache, schema::MetaGraphT *subGraphDef) { - std::vector tensors = tensorCache.GetCachedTensor(); - for (auto iter : tensors) { - std::unique_ptr temp(iter); - subGraphDef->allTensors.emplace_back(move(temp)); +STATUS CaffeModelParser::ConvertLayerQuantParams(const caffe::LayerParameter &layer, + const caffe::LayerParameter &weight, lite::PrimitiveC *primitive_c) { + if (primitive_c == nullptr) { + MS_LOG(ERROR) << "primitive_c is null, get quant params failed."; + return RET_NULL_PTR; + } + for (auto input_idx : layer.bottom()) 
{ + std::vector notinited_quant_params(1); + primitive_c->AddInputQuantParam(notinited_quant_params); + } + for (auto input_idx : weight.blobs()) { + std::vector notinited_quant_params(1); + primitive_c->AddInputQuantParam(notinited_quant_params); + } + for (auto output_idx : layer.top()) { + std::vector notinited_quant_params(1); + primitive_c->AddOutputQuantParam(notinited_quant_params); } return RET_OK; } -STATUS CaffeModelParser::SetGraphTensorIndex(const caffe::NetParameter &proto, TensorCache *tensorCache, - schema::MetaGraphT *subGraphDef) { - CaffeInspector caffeInspector; - caffeInspector.InspectModel(proto); - for (auto iter : caffeInspector.GetGraphInput()) { - int index = tensorCache->FindTensor(iter); - if (index >= 0) { - subGraphDef->inputIndex.emplace_back(index); - } else { - MS_LOG(ERROR) << "Can't find input tensor layer for graph."; - return RET_ERROR; - } +STATUS CaffeModelParser::ConvertBlobs(const caffe::LayerParameter &layer, std::vector *const_parameters) { + if (const_parameters == nullptr) { + MS_LOG(ERROR) << "const parameters are null"; + return RET_NULL_PTR; } - for (auto iter : caffeInspector.GetGraphOutput()) { - int index = -1; - if (splitLayer.find(iter) != splitLayer.end()) { - index = tensorCache->FindTensor(splitLayer.find(iter)->second); + // Layer must have Filter + if (layer.blobs_size() == 0) { + MS_LOG(INFO) << "No filter data in layer " << layer.name().c_str(); + return RET_OK; + } + for (int i = 0; i < layer.blobs_size(); i++) { + std::vector shape; + ConvertShape(layer.blobs(i), &shape); + + // cal Weight num + auto parameter = func_graph_ptr_->add_parameter(); + auto type_ptr = TypeIdToType(TypeId::kNumberTypeFloat32); + std::vector shape_vector; + (void)std::transform(shape.begin(), shape.end(), std::back_inserter(shape_vector), + [](const int32_t &value) { return static_cast(value); }); + auto abstract_tensor = std::make_shared(type_ptr, shape_vector); + parameter->set_abstract(abstract_tensor); + if (layer.type() == 
"Convolution" || layer.type() == "Deconvolution") { + if (i == 0) { + parameter->set_name(layer.name() + "/weight"); + } else if (i == 1) { + parameter->set_name(layer.name() + "/bias"); + } } else { - index = tensorCache->FindTensor(iter); + parameter->set_name(layer.name() + "/input-" + std::to_string(i + layer.top_size())); } - if (index >= 0) { - subGraphDef->outputIndex.emplace_back(index); + ParamValueLitePtr param_value = std::make_shared(); + MS_ASSERT(param_value != nullptr); + param_value->set_tensor_shape(shape); + param_value->set_tensor_type(TypeId::kNumberTypeFloat32); + param_value->set_format(schema::Format::Format_NCHW); + + int count = 0; + if (layer.blobs(i).double_data_size() > 0) { + count = layer.blobs(i).double_data_size(); + auto buf = std::make_unique(count); + for (int j = 0; j < count; ++j) { + buf[j] = layer.blobs(j).double_data(j); + } + param_value->set_tensor_addr(buf.release()); } else { - MS_LOG(ERROR) << "Can't find output tensor layer for graph."; - return RET_ERROR; + count = layer.blobs(i).data_size(); + auto buf = std::make_unique(count); + const float *data_ptr = layer.blobs(i).data().data(); + if (EOK != ::memcpy_s(buf.get(), count * sizeof(float), data_ptr, count * sizeof(float))) { + MS_LOG(ERROR) << "memcpy_s failed."; + return RET_ERROR; + } + param_value->set_tensor_addr(buf.release()); } + param_value->set_tensor_size(count * sizeof(float)); + parameter->set_default_param(param_value); + const_parameters->emplace_back(parameter); } return RET_OK; } -STATUS CaffeModelParser::ParseLayer(const caffe::NetParameter &proto, const caffe::NetParameter &weight, - TensorCache *tensorCache, schema::MetaGraphT *subGraphDef, - const QuantType &quantType) { - static bool interrupt = false; - int status = RET_OK; - for (int i = 0; i < proto.layer_size(); i++) { - auto layer = proto.layer(i); - - caffe::LayerParameter layerP; - for (int j = 0; j < weight.layer_size(); j++) { - auto tempLayer = weight.layer(j); - if (tempLayer.name() == 
layer.name()) { - layerP = tempLayer; - break; - } - } - if (layer.type() == "Input") { - std::unique_ptr msTensor = std::make_unique(); - for (int j = 0; j < layer.input_param().shape(0).dim_size(); j++) { - msTensor->dims.push_back(layer.input_param().shape(0).dim(j)); - } - msTensor->nodeType = schema::NodeType::NodeType_ValueNode; - msTensor->refCount = 1; - msTensor->dataType = kNumberTypeFloat32; - tensorCache->AddTensor(layer.top(0), msTensor.release(), GRAPH_INPUT); - } else { - if (skipedLayerType.find(layer.type()) != skipedLayerType.end()) { - MS_LOG(INFO) << "Skip layer " << layer.name(); - continue; - } - - // here we only process the bn with phase - if (layer.type() == "BatchNorm" && layer.include_size() == 1) { - if (layer.include(0).phase() == caffe::TRAIN) { - MS_LOG(INFO) << "Skip layer " << layer.name(); - continue; - } - } - - std::unique_ptr op = std::make_unique(); - op->name = layer.name(); - op->quantType = quantType; - if (layer.type() == "Split") { - for (int j = 0; j < layer.top_size(); ++j) { - splitLayer.emplace(layer.top(j), layer.bottom(0)); - } - continue; - } - - auto nodeParser = CaffeNodeParserRegistry::GetInstance()->GetNodeParser(layer.type().c_str()); - if (nodeParser == nullptr || interrupt) { - interrupt = true; - if (nodeParser == nullptr) { - NoSupportOp::GetInstance()->InsertOp(layer.type()); - status = (status == RET_OK ? RET_NOT_FIND_OP : status); - } - continue; - } - - std::vector weightVec; - auto status_node = nodeParser->Parse(layer, layerP, op.get(), &weightVec); - if (status_node != RET_OK) { - interrupt = true; - if (status_node == RET_NOT_FIND_OP) { - NoSupportOp::GetInstance()->InsertOp(layer.type()); - } else { - MS_LOG(ERROR) << "Parse weight for " << layer.name() << " Failed!"; - } - status = (status == RET_OK ? 
status_node : status); - continue; - } - - status_node = SetOpInputIdx(layer, op.get(), tensorCache); - if (status_node != RET_OK) { - MS_LOG(ERROR) << "Set Op " << layer.name() << " Input Index Failed!"; - status = (status == RET_OK ? status_node : status); - } - SetWeightTensor(weightVec, op.get(), tensorCache); - - status_node = SetOpOutputIdx(layer, op.get(), tensorCache); - if (status_node != RET_OK) { - interrupt = true; - MS_LOG(ERROR) << "Set Op " << layer.name() << " Output Index Failed!"; - status = (status == RET_OK ? status_node : status); - continue; - } - - // op->fmkType = FmkType_CAFFE; - subGraphDef->nodes.emplace_back(move(op)); +STATUS CaffeModelParser::ConvertBottom(const caffe::LayerParameter &layer, std::vector *input_nodes) { + if (input_nodes == nullptr) { + MS_LOG(ERROR) << "input_nodes is null"; + return RET_NULL_PTR; + } + for (int i = 0; i < layer.bottom_size(); i++) { + if (nodes_.find(layer.bottom(i)) == nodes_.end()) { + MS_LOG(ERROR) << "layer bottom " << layer.bottom(i) << " is not found"; + return RET_NOT_FIND_OP; } + input_nodes->emplace_back(nodes_.find(layer.bottom(i))->second); } - return status; + return RET_OK; } -STATUS CaffeModelParser::GetModelInput(const caffe::NetParameter &proto, TensorCache *tensorCache) { - for (int i = 0; i < proto.input_size(); i++) { - if (proto.input_dim_size() <= 0) { - continue; - } - std::unique_ptr msTensor = std::make_unique(); - if (proto.input_dim_size() > 4) { - int step = proto.input_dim_size() / proto.input_size(); - for (int j = i * step; j < (i + 1) * step; j++) { - msTensor->dims.push_back(proto.input_dim(j)); - } - } else { - for (int j = 0; j < proto.input_dim_size(); j++) { - msTensor->dims.push_back(proto.input_dim(j)); - } - } - msTensor->refCount = schema::NodeType::NodeType_ValueNode; - msTensor->dataType = kNumberTypeFloat32; - tensorCache->AddTensor(proto.input(i), msTensor.release(), GRAPH_INPUT); +STATUS CaffeModelParser::ConvertTop(const caffe::LayerParameter &layer, const 
CNodePtr &cnode) { + auto type_ptr = TypeIdToType(TypeId::kNumberTypeFloat32); + std::vector shape_vector; + if (layer.top_size() == 1) { + cnode->set_abstract(std::make_shared(type_ptr, shape_vector)); + nodes_[layer.top(0)] = cnode; + return RET_OK; } - for (int i = 0; i < proto.input_shape_size(); i++) { - auto shape = proto.input_shape(i); - std::unique_ptr msTensor = std::make_unique(); - for (int j = 0; j < shape.dim_size(); j++) { - msTensor->dims.push_back(shape.dim(j)); + AbstractBasePtrList abstract_list; + for (int i = 0; i < layer.top_size(); i++) { + abstract_list.emplace_back(std::make_shared(type_ptr, shape_vector)); + auto tuple_get_item_prim_ptr = GetTupleGetItemPrim(); + if (tuple_get_item_prim_ptr == nullptr) { + MS_LOG(ERROR) << "GetTupleGetItemPrim return nullptr"; + return RET_NULL_PTR; } - msTensor->refCount = schema::NodeType::NodeType_ValueNode; - msTensor->dataType = kNumberTypeFloat32; - tensorCache->AddTensor(proto.input(i), msTensor.release(), GRAPH_INPUT); + auto tuple_get_item_prim = NewValueNode(tuple_get_item_prim_ptr); + auto get_item_value = NewValueNode(MakeValue(i)); + std::vector inputs{tuple_get_item_prim, cnode, get_item_value}; + CNodePtr get_item_cnode = func_graph_ptr_->NewCNode(inputs); + get_item_cnode->set_fullname_with_scope(layer.top(i)); + nodes_[layer.top(i)] = get_item_cnode; } + cnode->set_abstract(std::make_shared(abstract_list)); return RET_OK; } -} // namespace lite -} // namespace mindspore + +bool CaffeModelParser::IsSkipedLayer(const caffe::LayerParameter &layer) { + if (layer.type() == "Input" || layer.type() == "Dropout") { + return true; + } + return layer.include_size() == 1 && layer.include(0).phase() == caffe::TRAIN; +} + +MetaGraphT *CaffeModelParser::ParseToFb(const std::string &model_file, const std::string &weight_file, + const QuantType &quant_type) { + return nullptr; +} + +} // namespace mindspore::lite diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_model_parser.h 
b/mindspore/lite/tools/converter/parser/caffe/caffe_model_parser.h index c5c8c5571a..ca61a934a0 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_model_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_model_parser.h @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - #ifndef MINDSPORE_LITE_TOOLS_CONVERTER_PARSER_CAFFE_CAFFE_MODEL_PARSER_H_ #define MINDSPORE_LITE_TOOLS_CONVERTER_PARSER_CAFFE_CAFFE_MODEL_PARSER_H_ @@ -24,41 +23,45 @@ #include #include "tools/converter/model_parser.h" #include "proto/caffe.pb.h" -#include "tools/common/tensor_util.h" -namespace mindspore { -namespace lite { +namespace mindspore::lite { class CaffeModelParser : public ModelParser { public: CaffeModelParser(); - virtual ~CaffeModelParser(); + ~CaffeModelParser() override; + + FuncGraphPtr Parse(const std::string &model_file, const std::string &weight_file, + const QuantType &quant_type) override; + + MetaGraphT *ParseToFb(const std::string &model_file, const std::string &weight_file, + const QuantType &quant_type) override; private: - schema::MetaGraphT *ParseToFb(const std::string &model_file, const std::string &weight_file, - const QuantType &quant_type = QuantType_QUANT_NONE) override; + STATUS InitOriginModel(const std::string &model_file, const std::string &weight_file); - STATUS SetOpInputIdx(const caffe::LayerParameter &layer, schema::CNodeT *op, TensorCache *tensorCache); + STATUS ConvertGraphInputs(); - STATUS SetOpOutputIdx(const caffe::LayerParameter &layer, schema::CNodeT *op, TensorCache *tensorCache); + STATUS ConvertGraphOutputs(); - STATUS SetWeightTensor(const std::vector &weightVec, schema::CNodeT *op, TensorCache *tensorCache); + STATUS ConvertLayers(); - STATUS SetAllTensors(const TensorCache &tensorCache, schema::MetaGraphT *subGraphDef); + STATUS ConvertLayerQuantParams(const caffe::LayerParameter &layer, const caffe::LayerParameter &weight, + lite::PrimitiveC 
*primitive_c); - STATUS SetGraphTensorIndex(const caffe::NetParameter &proto, TensorCache *tensorCache, - schema::MetaGraphT *subGraphDef); + STATUS ConvertBlobs(const caffe::LayerParameter &layer, std::vector *const_parameters); - STATUS ParseLayer(const caffe::NetParameter &proto, const caffe::NetParameter &weight, TensorCache *tensorCache, - schema::MetaGraphT *subGraphDef, const QuantType &quantType); + STATUS ConvertBottom(const caffe::LayerParameter &layer, std::vector *input_nodes); - STATUS GetModelInput(const caffe::NetParameter &proto, TensorCache *tensorCache); + STATUS ConvertTop(const caffe::LayerParameter &layer, const CNodePtr &cnode); - static const std::set skipedLayerType; + bool IsSkipedLayer(const caffe::LayerParameter &layer); - std::unordered_map splitLayer; + caffe::NetParameter caffe_model_; + caffe::NetParameter caffe_weight_; + std::unordered_map nodes_; + FuncGraphPtr func_graph_ptr_; }; -} // namespace lite -} // namespace mindspore +} // namespace mindspore::lite #endif // MINDSPORE_LITE_TOOLS_CONVERTER_PARSER_CAFFE_CAFFE_MODEL_PARSER_H_ diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_node_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_node_parser.h index e01181d1dd..c713436dee 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_node_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_node_parser.h @@ -19,6 +19,8 @@ #include #include +#include "src/ops/primitive_c.h" +#include "c_ops/primitive_c.h" #include "google/protobuf/message.h" #include "schema/inner/model_generated.h" #include "proto/caffe.pb.h" @@ -34,8 +36,10 @@ class CaffeNodeParser { virtual ~CaffeNodeParser() {} - virtual int Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) = 0; + virtual lite::PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { + return nullptr; + } protected: const 
std::string name; diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_permute_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_permute_parser.cc index d1a75ba2b2..e552e4c34e 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_permute_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_permute_parser.cc @@ -19,23 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffePermuteParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffePermuteParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffePermuteParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } const caffe::PermuteParameter &permuteParam = proto.permute_param(); @@ -45,11 +34,10 @@ STATUS CaffePermuteParser::Parse(const caffe::LayerParameter &proto, const caffe attr->perm[i] = (int32_t)permuteParam.order()[i]; } attr->conjugate = false; - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Transpose; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Transpose; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffePermuteParser("Permute", new CaffePermuteParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_permute_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_permute_parser.h index 7d43ff3d3b..ae19bc391c 100644 --- 
a/mindspore/lite/tools/converter/parser/caffe/caffe_permute_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_permute_parser.h @@ -28,8 +28,7 @@ class CaffePermuteParser : public CaffeNodeParser { CaffePermuteParser() : CaffeNodeParser("Permute") {} ~CaffePermuteParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_pooling_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_pooling_parser.cc index de3329283a..7964fdab79 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_pooling_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_pooling_parser.cc @@ -19,67 +19,6 @@ namespace mindspore { namespace lite { -STATUS CaffePoolingParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffePoolingParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - - std::unique_ptr attr = std::make_unique(); - if (attr == nullptr) { - MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; - } - - attr->format = schema::Format::Format_NCHW; - - const caffe::PoolingParameter &poolingParam = proto.pooling_param(); - auto status = ParsePads(poolingParam, attr.get()); - if (status != RET_OK) { - MS_LOG(ERROR) << "ParsePads for " << proto.name().c_str() << " failed"; - return RET_ERROR; - } - - status = ParseStrides(poolingParam, attr.get()); - if (status != RET_OK) { - MS_LOG(ERROR) << "ParseStrides for " << 
proto.name().c_str() << " failed"; - return RET_ERROR; - } - - status = ParseWindows(poolingParam, attr.get()); - if (status != RET_OK) { - MS_LOG(ERROR) << "ParseWindows for " << proto.name().c_str() << " failed"; - return RET_ERROR; - } - - status = ParsePoolingMode(poolingParam, attr.get()); - if (status != RET_OK) { - MS_LOG(ERROR) << "ParsePoolingMode for " << proto.name().c_str() << " failed"; - return RET_ERROR; - } - - attr->roundMode = schema::RoundMode_CEIL; - if (poolingParam.has_round_mode()) { - if (poolingParam.round_mode() == caffe::PoolingParameter_RoundMode_FLOOR) { - attr->roundMode = schema::RoundMode_FLOOR; - } else if (poolingParam.round_mode() == caffe::PoolingParameter_RoundMode_CEIL) { - attr->roundMode = schema::RoundMode_CEIL; - } - } - attr->padMode = schema::PadMode_CAFFE; - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Pooling; - op->primitive->value.value = attr.release(); - return RET_OK; -} STATUS CaffePoolingParser::ParsePads(const caffe::PoolingParameter &poolingParam, schema::PoolingT *attr) { if (poolingParam.has_pad_h() && poolingParam.has_pad_w()) { @@ -156,6 +95,55 @@ STATUS CaffePoolingParser::ParsePoolingMode(const caffe::PoolingParameter &pooli } return RET_OK; } +PrimitiveC *CaffePoolingParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { + std::unique_ptr attr = std::make_unique(); + if (attr == nullptr) { + MS_LOG(ERROR) << "new op failed"; + return nullptr; + } + + attr->format = schema::Format::Format_NCHW; + + const caffe::PoolingParameter &poolingParam = proto.pooling_param(); + auto status = ParsePads(poolingParam, attr.get()); + if (status != RET_OK) { + MS_LOG(ERROR) << "ParsePads for " << proto.name().c_str() << " failed"; + return nullptr; + } + + status = ParseStrides(poolingParam, attr.get()); + if (status != RET_OK) { + MS_LOG(ERROR) << "ParseStrides for " << proto.name().c_str() << " failed"; + return nullptr; + } + + status 
= ParseWindows(poolingParam, attr.get()); + if (status != RET_OK) { + MS_LOG(ERROR) << "ParseWindows for " << proto.name().c_str() << " failed"; + return nullptr; + } + + status = ParsePoolingMode(poolingParam, attr.get()); + if (status != RET_OK) { + MS_LOG(ERROR) << "ParsePoolingMode for " << proto.name().c_str() << " failed"; + return nullptr; + } + + attr->roundMode = schema::RoundMode_CEIL; + if (poolingParam.has_round_mode()) { + if (poolingParam.round_mode() == caffe::PoolingParameter_RoundMode_FLOOR) { + attr->roundMode = schema::RoundMode_FLOOR; + } else if (poolingParam.round_mode() == caffe::PoolingParameter_RoundMode_CEIL) { + attr->roundMode = schema::RoundMode_CEIL; + } + } + attr->padMode = schema::PadMode_CAFFE; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Pooling; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); +} CaffeNodeRegistrar g_caffePoolingParser("Pooling", new CaffePoolingParser()); } // namespace lite diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_pooling_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_pooling_parser.h index 9978f55e31..f0d62c25db 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_pooling_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_pooling_parser.h @@ -28,8 +28,7 @@ class CaffePoolingParser : public CaffeNodeParser { CaffePoolingParser() : CaffeNodeParser("pooling") {} ~CaffePoolingParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; static STATUS ParsePads(const caffe::PoolingParameter &poolingParam, schema::PoolingT *attr); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_power_parser.cc 
b/mindspore/lite/tools/converter/parser/caffe/caffe_power_parser.cc index ceb704870c..78e6ce9cab 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_power_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_power_parser.cc @@ -20,23 +20,12 @@ namespace mindspore { namespace lite { -STATUS CaffePowerParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffePowerParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffePowerParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } const caffe::PowerParameter &powerParam = proto.power_param(); @@ -50,10 +39,10 @@ STATUS CaffePowerParser::Parse(const caffe::LayerParameter &proto, const caffe:: attr->shift = 0.0; } - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Power; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Power; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffePowerParser("Power", new CaffePowerParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_power_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_power_parser.h index 9c5c8b9c3e..89c67763a1 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_power_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_power_parser.h @@ -28,8 +28,7 @@ class CaffePowerParser : public CaffeNodeParser { 
CaffePowerParser() : CaffeNodeParser("power") {} ~CaffePowerParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_prelu_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_prelu_parser.cc index dc75a3928d..ec3f35aab3 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_prelu_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_prelu_parser.cc @@ -19,27 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffePReluParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffePReluParser"; - if (weightVec == nullptr) { - MS_LOG(ERROR) << "weightVec is null"; - return RET_NULL_PTR; - } - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffePReluParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } const caffe::PReLUParameter &pReluParam = proto.prelu_param(); @@ -48,22 +33,10 @@ STATUS CaffePReluParser::Parse(const caffe::LayerParameter &proto, const caffe:: } else { attr->channelShared = false; } - - if (weight.blobs_size() == 0) { - MS_LOG(ERROR) << "PRelu No blobs data in layer " << proto.name().c_str(); - return RET_ERROR; - } - auto slope = ConvertWeight(weight.blobs(0)); - if (slope == nullptr) { - 
MS_LOG(ERROR) << "CaffePRelu convert slope for layer " << weight.name().c_str() << " failed."; - return RET_ERROR; - } - weightVec->push_back(slope); - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_PReLU; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_PReLU; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffePReluParser("PReLU", new CaffePReluParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_prelu_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_prelu_parser.h index f921e55233..2a1e715d16 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_prelu_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_prelu_parser.h @@ -28,8 +28,7 @@ class CaffePReluParser : public CaffeNodeParser { CaffePReluParser() : CaffeNodeParser("pRelu") {} ~CaffePReluParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_reduce_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_reduce_parser.cc index 8264e36cc5..17f8fbf304 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_reduce_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_reduce_parser.cc @@ -20,64 +20,25 @@ namespace mindspore { namespace lite { -STATUS CaffeReduceParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeReduceParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - 
return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - - std::unique_ptr attr = std::make_unique(); +PrimitiveC *CaffeReduceParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { + std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } - const caffe::ReductionParameter &reduce_param = proto.reduction_param(); - if (reduce_param.has_operation()) { - switch (reduce_param.operation()) { - case caffe::ReductionParameter_ReductionOp_MEAN: - attr->mode = schema::ReduceMode_ReduceMean; - break; - case caffe::ReductionParameter_ReductionOp_SUM: - attr->mode = schema::ReduceMode_ReduceSum; - break; - case caffe::ReductionParameter_ReductionOp_SUMSQ: - attr->mode = schema::ReduceMode_ReduceSumSquare; - break; - case caffe::ReductionParameter_ReductionOp_ASUM: - attr->mode = schema::ReduceMode_ReduceASum; - break; - default: - MS_LOG(ERROR) << "reduce parse params fail, unsupported opration: " << reduce_param.operation(); - return RET_ERROR; - } - } else { - attr->mode = schema::ReduceMode_ReduceSum; - } - if (reduce_param.has_axis()) { - attr->axes = std::vector(1, reduce_param.axis()); - } else { - attr->axes = std::vector(1, 0); - } - if (reduce_param.has_coeff()) { - attr->coeff = reduce_param.coeff(); + const caffe::PReLUParameter &pReluParam = proto.prelu_param(); + if (pReluParam.has_channel_shared()) { + attr->channelShared = pReluParam.channel_shared(); } else { - attr->coeff = 1.0; + attr->channelShared = false; } - attr->reduceToEnd = true; - attr->keepDims = false; - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Reduce; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Reduce; + 
primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeReduceParser("Reduction", new CaffeReduceParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_reduce_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_reduce_parser.h index 2ccc69879a..f818e0a114 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_reduce_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_reduce_parser.h @@ -28,8 +28,7 @@ class CaffeReduceParser : public CaffeNodeParser { CaffeReduceParser() : CaffeNodeParser("reduce") {} ~CaffeReduceParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_relu6_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_relu6_parser.cc index becae39f93..345fbf72e1 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_relu6_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_relu6_parser.cc @@ -19,23 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffeRelu6Parser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeRelu6Parser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeRelu6Parser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr(new schema::ActivationT()); if (attr == nullptr) { MS_LOG(ERROR) << "new 
op failed"; - return RET_NULL_PTR; + return nullptr; } attr->type = schema::ActivationType_RELU6; @@ -46,11 +35,10 @@ STATUS CaffeRelu6Parser::Parse(const caffe::LayerParameter &proto, const caffe:: attr->alpha = negative_slope; } } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Activation; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Activation; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeRelu6Parser("ReLU6", new CaffeRelu6Parser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_relu6_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_relu6_parser.h index 09c620b03b..82b6256e8e 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_relu6_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_relu6_parser.h @@ -27,8 +27,7 @@ class CaffeRelu6Parser : public CaffeNodeParser { CaffeRelu6Parser() : CaffeNodeParser("relu6") {} ~CaffeRelu6Parser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_relu_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_relu_parser.cc index f94308402f..110be37d9c 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_relu_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_relu_parser.cc @@ -19,23 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffeReluParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << 
"parse CaffeReluParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeReluParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } attr->type = schema::ActivationType_RELU; @@ -46,11 +35,10 @@ STATUS CaffeReluParser::Parse(const caffe::LayerParameter &proto, const caffe::L attr->alpha = negative_slope; } } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Activation; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Activation; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeReluParser("ReLU", new CaffeReluParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_relu_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_relu_parser.h index 72aed1222c..f76d1816a2 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_relu_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_relu_parser.h @@ -28,8 +28,7 @@ class CaffeReluParser : public CaffeNodeParser { CaffeReluParser() : CaffeNodeParser("relu") {} ~CaffeReluParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_reshape_parser.cc 
b/mindspore/lite/tools/converter/parser/caffe/caffe_reshape_parser.cc index cd3331345e..7c9aaf94a0 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_reshape_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_reshape_parser.cc @@ -19,23 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffeReshapeParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeReshapeParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeReshapeParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } attr->format = schema::Format::Format_NCHW; @@ -43,18 +32,17 @@ STATUS CaffeReshapeParser::Parse(const caffe::LayerParameter &proto, const caffe const caffe::ReshapeParameter &reshapeParam = proto.reshape_param(); if (!reshapeParam.has_shape()) { MS_LOG(ERROR) << "Reshape has no shape info, ret fail"; - return RET_ERROR; + return nullptr; } const caffe::BlobShape &blob_shape = reshapeParam.shape(); for (int i = 0; i < blob_shape.dim_size(); i++) { attr->shape.push_back(blob_shape.dim(i)); } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Reshape; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Reshape; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeReshapeParser("Reshape", new CaffeReshapeParser()); diff --git 
a/mindspore/lite/tools/converter/parser/caffe/caffe_reshape_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_reshape_parser.h index 6de6736e41..55c4aca68d 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_reshape_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_reshape_parser.h @@ -28,8 +28,7 @@ class CaffeReshapeParser : public CaffeNodeParser { CaffeReshapeParser() : CaffeNodeParser("reshape") {} ~CaffeReshapeParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_scale_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_scale_parser.cc index b434573834..082f621e29 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_scale_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_scale_parser.cc @@ -19,33 +19,18 @@ namespace mindspore { namespace lite { -STATUS CaffeScaleParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeScaleParser"; - if (weightVec == nullptr) { - MS_LOG(ERROR) << "weightVec is null"; - return RET_NULL_PTR; - } - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeScaleParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } if (weight.blobs_size() + 
weight.bottom_size() < 2) { MS_LOG(ERROR) << "Scale bottom size:" << weight.bottom_size() << ", blobs size:" << weight.blobs_size() << " invalid in layer " << weight.name().c_str(); - return RET_ERROR; + return nullptr; } const caffe::ScaleParameter &scaleParam = weight.scale_param(); @@ -53,43 +38,14 @@ STATUS CaffeScaleParser::Parse(const caffe::LayerParameter &proto, const caffe:: uint32_t axis_index = 1; if (GetAxisIndex(scaleParam.axis(), &axis_index)) { MS_LOG(ERROR) << "scale get axis failed for layer " << weight.name().c_str(); - return RET_ERROR; + return nullptr; } } attr->axis = 1; - - // parse scale - if (weight.blobs().size() == 1) { - auto scale = ConvertWeight(weight.blobs(0)); - if (scale == nullptr) { - MS_LOG(ERROR) << "Scale Convert blobs(0) for layer " << weight.name().c_str() << " failed."; - return RET_ERROR; - } - weightVec->push_back(scale); - } else if (weight.blobs().size() >= 2) { - auto scale = ConvertWeight(weight.blobs(0)); - if (scale == nullptr) { - MS_LOG(ERROR) << "Scale Convert blobs(0) for layer " << weight.name().c_str() << " failed."; - return RET_ERROR; - } - weightVec->push_back(scale); - - // parse bias - bool scaleBias = scaleParam.bias_term(); - if (scaleBias) { - auto bias = ConvertWeight(weight.blobs_size() > 1 ? 
weight.blobs(1) : weight.blobs(0)); - if (bias == nullptr) { - MS_LOG(ERROR) << "Scale Convert blobs(1) for layer " << weight.name().c_str() << " failed."; - return RET_ERROR; - } - weightVec->push_back(bias); - } - } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Scale; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Scale; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } STATUS CaffeScaleParser::GetAxisIndex(const int32_t &axis, uint32_t *axis_index) { diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_scale_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_scale_parser.h index 10dcc708da..ab34a2e491 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_scale_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_scale_parser.h @@ -28,8 +28,7 @@ class CaffeScaleParser : public CaffeNodeParser { CaffeScaleParser() : CaffeNodeParser("scale") {} ~CaffeScaleParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; static STATUS GetAxisIndex(const int32_t &axis, uint32_t *axis_index); }; diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_sigmoid_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_sigmoid_parser.cc index da70b3a796..f8ff9ccf85 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_sigmoid_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_sigmoid_parser.cc @@ -19,31 +19,19 @@ namespace mindspore { namespace lite { -STATUS CaffeSigmoidParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector 
*weightVec) { - MS_LOG(DEBUG) << "parse CaffeSigmoidParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeSigmoidParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } attr->type = schema::ActivationType_SIGMOID; - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Activation; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Activation; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeSigmoidParser("Sigmoid", new CaffeSigmoidParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_sigmoid_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_sigmoid_parser.h index d2e32523b3..fd2f730981 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_sigmoid_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_sigmoid_parser.h @@ -28,8 +28,7 @@ class CaffeSigmoidParser : public CaffeNodeParser { CaffeSigmoidParser() : CaffeNodeParser("sigmoid") {} ~CaffeSigmoidParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_slice_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_slice_parser.cc index 
976718874e..c9df8641d5 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_slice_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_slice_parser.cc @@ -19,23 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffeSliceParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeSliceParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeSliceParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } const caffe::SliceParameter &slice_param = proto.slice_param(); @@ -60,11 +49,10 @@ STATUS CaffeSliceParser::Parse(const caffe::LayerParameter &proto, const caffe:: } else if (slice_param.has_slice_dim()) { attr->splitDim = slice_param.slice_dim(); } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Split; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Split; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeSliceParser("Slice", new CaffeSliceParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_slice_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_slice_parser.h index 2500a9fe31..578faad338 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_slice_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_slice_parser.h @@ -28,8 +28,7 @@ class CaffeSliceParser : public CaffeNodeParser { 
CaffeSliceParser() : CaffeNodeParser("slice") {} ~CaffeSliceParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_softmax_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_softmax_parser.cc index 9c6e0e9258..d5d8667f84 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_softmax_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_softmax_parser.cc @@ -19,23 +19,12 @@ namespace mindspore { namespace lite { -STATUS CaffeSoftmaxParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeSoftmaxParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeSoftmaxParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } if (proto.has_softmax_param() && proto.softmax_param().has_axis()) { @@ -46,11 +35,10 @@ STATUS CaffeSoftmaxParser::Parse(const caffe::LayerParameter &proto, const caffe } else { attr->axis = 1; } - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_SoftMax; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_SoftMax; + primitive->value.value = attr.release(); + return 
PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeSoftmaxParser("Softmax", new CaffeSoftmaxParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_softmax_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_softmax_parser.h index 796df196b1..2da6c324ee 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_softmax_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_softmax_parser.h @@ -28,8 +28,7 @@ class CaffeSoftmaxParser : public CaffeNodeParser { CaffeSoftmaxParser() : CaffeNodeParser("softmax") {} ~CaffeSoftmaxParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_tanh_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_tanh_parser.cc index e88121cac4..49b00cf7bf 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_tanh_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_tanh_parser.cc @@ -20,30 +20,18 @@ namespace mindspore { namespace lite { -STATUS CaffeTanhParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeTanhParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeTanhParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr(new schema::ActivationT()); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return 
nullptr; } attr->type = schema::ActivationType_TANH; - - op->name = proto.name(); - op->primitive->value.type = schema::PrimitiveType_Activation; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Activation; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeTanhParser("TanH", new CaffeTanhParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_tanh_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_tanh_parser.h index 03f7b32d28..c721b1b547 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_tanh_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_tanh_parser.h @@ -28,8 +28,7 @@ class CaffeTanhParser : public CaffeNodeParser { CaffeTanhParser() : CaffeNodeParser("tanh") {} ~CaffeTanhParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_tile_parser.cc b/mindspore/lite/tools/converter/parser/caffe/caffe_tile_parser.cc index 4cd060fd10..10319f757e 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_tile_parser.cc +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_tile_parser.cc @@ -20,23 +20,12 @@ namespace mindspore { namespace lite { -STATUS CaffeTileParser::Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, - schema::CNodeT *op, std::vector *weightVec) { - MS_LOG(DEBUG) << "parse CaffeTileParser"; - if (op == nullptr) { - MS_LOG(ERROR) << "op is null"; - return RET_NULL_PTR; - } - op->primitive = std::make_unique(); - if (op->primitive == nullptr) { - 
MS_LOG(ERROR) << "op->primitive is null"; - return RET_NULL_PTR; - } - +PrimitiveC *CaffeTileParser::ParseLitePrimitive(const caffe::LayerParameter &proto, + const caffe::LayerParameter &weight) { std::unique_ptr attr = std::make_unique(); if (attr == nullptr) { MS_LOG(ERROR) << "new op failed"; - return RET_NULL_PTR; + return nullptr; } const caffe::TileParameter &tile_param = proto.tile_param(); @@ -57,10 +46,10 @@ STATUS CaffeTileParser::Parse(const caffe::LayerParameter &proto, const caffe::L attr->dims = dims; attr->multiples = multiples; - - op->primitive->value.type = schema::PrimitiveType_Tile; - op->primitive->value.value = attr.release(); - return RET_OK; + auto primitive = std::make_unique(); + primitive->value.type = schema::PrimitiveType_Tile; + primitive->value.value = attr.release(); + return PrimitiveC::Create(primitive.release()); } CaffeNodeRegistrar g_caffeTileParser("Tile", new CaffeTileParser()); diff --git a/mindspore/lite/tools/converter/parser/caffe/caffe_tile_parser.h b/mindspore/lite/tools/converter/parser/caffe/caffe_tile_parser.h index b9dee3c7d4..da906ba1b0 100644 --- a/mindspore/lite/tools/converter/parser/caffe/caffe_tile_parser.h +++ b/mindspore/lite/tools/converter/parser/caffe/caffe_tile_parser.h @@ -28,8 +28,7 @@ class CaffeTileParser : public CaffeNodeParser { CaffeTileParser() : CaffeNodeParser("tile") {} ~CaffeTileParser() override = default; - STATUS Parse(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight, schema::CNodeT *op, - std::vector *weightVec) override; + PrimitiveC *ParseLitePrimitive(const caffe::LayerParameter &proto, const caffe::LayerParameter &weight) override; }; } // namespace lite } // namespace mindspore diff --git a/mindspore/lite/tools/converter/parser/tflite/tflite_model_parser.cc b/mindspore/lite/tools/converter/parser/tflite/tflite_model_parser.cc index cd1e2ed6db..0e3cc0b8dd 100644 --- a/mindspore/lite/tools/converter/parser/tflite/tflite_model_parser.cc +++ 
b/mindspore/lite/tools/converter/parser/tflite/tflite_model_parser.cc @@ -103,7 +103,7 @@ STATUS TfliteModelParser::ConvertOps() { auto primitiveC = node_parser->ParseLitePrimitive(op, tflite_model_); if (primitiveC == nullptr) { - MS_LOG(ERROR) << "parse node " << op_type.c_str() << " parser failed"; + MS_LOG(ERROR) << "parse node " << op_name << " parser failed"; continue; } diff --git a/mindspore/lite/tools/optimizer/fusion/conv_bn_fusion.cc b/mindspore/lite/tools/optimizer/fusion/conv_bn_fusion.cc index 802a9f805c..a44352ba42 100644 --- a/mindspore/lite/tools/optimizer/fusion/conv_bn_fusion.cc +++ b/mindspore/lite/tools/optimizer/fusion/conv_bn_fusion.cc @@ -29,6 +29,7 @@ namespace mindspore::opt { namespace { constexpr size_t kCaffeBNMeanIndex = 2; constexpr size_t kCaffeBNVarIndex = 3; +constexpr size_t kCaffeBNScaleFactorIndex = 4; constexpr size_t kTFBNScaleIndex = 2; constexpr size_t kTFBNBiasIndex = 3; constexpr size_t kTFBNMeanIndex = 4; @@ -95,6 +96,34 @@ void CalTransBias(const AnfNodePtr &bn_mean_node, const AnfNodePtr &bn_bias_node } } } + +STATUS CalEstimatedData(const AnfNodePtr &origin_node, const AnfNodePtr &scale_factor_node) { + if (origin_node == nullptr) { + MS_LOG(ERROR) << "origin node is null"; + return RET_ERROR; + } + + if (scale_factor_node == nullptr) { + MS_LOG(ERROR) << "scale factor node is null"; + return RET_ERROR; + } + auto origin_param = origin_node->cast()->default_param(); + auto origin_tensor = std::dynamic_pointer_cast(origin_param); + auto origin_data = reinterpret_cast(origin_tensor->tensor_addr()); + + auto scale_factor_param = scale_factor_node->cast()->default_param(); + auto scale_factor_tensor = std::dynamic_pointer_cast(scale_factor_param); + if (scale_factor_tensor->tensor_shape_size() < 1) { + MS_LOG(ERROR) << "scale factor data size is not equal to 1"; + return RET_ERROR; + } + auto scale_factor_data = (reinterpret_cast(scale_factor_tensor->tensor_addr()))[0]; + float scale_factor = scale_factor_data == 0 ? 
0 : 1 / scale_factor_data; + for (int i = 0; i < origin_tensor->tensor_shape_size(); i++) { + origin_data[i] = origin_data[i] * scale_factor; + } + return RET_OK; +} } // namespace const BaseRef ConvBatchNormFusion::DefinePattern() const { auto conv_var = std::make_shared(IsConvNode); @@ -106,8 +135,9 @@ const BaseRef ConvBatchNormFusion::DefinePattern() const { } // BatchNorm weight Tensor definition: // caffe -// estimated_mean --0 -// estimated_variance --1 +// mean --0 +// variance --1 +// scale_factor --2 // tensorflow // scale -- 0 // bias --1 @@ -127,13 +157,17 @@ void ConvBatchNormFusion::InitTransParam(const CNodePtr &bn_node, int kernel_num if (GetCNodeType(bn_node) == schema::PrimitiveType_BatchNorm) { bn_mean_node = bn_node->input(kCaffeBNMeanIndex); bn_variance_node = bn_node->input(kCaffeBNVarIndex); - if (CheckIfNodeIsParam(bn_mean_node) != lite::RET_OK || CheckIfNodeIsParam(bn_variance_node) != lite::RET_OK) { + AnfNodePtr bn_scale_factor_node = bn_node->input(kCaffeBNScaleFactorIndex); + if (CheckIfNodeIsParam(bn_mean_node) != lite::RET_OK || CheckIfNodeIsParam(bn_variance_node) != lite::RET_OK || + CheckIfNodeIsParam(bn_scale_factor_node) != lite::RET_OK) { return; } MS_ASSERT(utils::isa>(primitive_c)); auto primc = utils::cast>(primitive_c); MS_ASSERT(primc != nullptr); eps = primc->GetEpsilon(); + if (CalEstimatedData(bn_mean_node, bn_scale_factor_node) != lite::RET_OK || + CalEstimatedData(bn_variance_node, bn_scale_factor_node) != lite::RET_OK) { + MS_LOG(ERROR) << "calculate estimated mean/variance failed"; + return; + } } else if (GetCNodeType(bn_node) == schema::PrimitiveType_FusedBatchNorm) { bn_scale_node = bn_node->input(kTFBNScaleIndex); bn_bias_node = bn_node->input(kTFBNBiasIndex);