From 2ef5d4a0f2eeaa5c6efb69b595064b33ba5ebb02 Mon Sep 17 00:00:00 2001 From: zengxianglong Date: Thu, 4 Feb 2021 09:32:14 +0800 Subject: [PATCH] fix the bug that subgraph outputs are omitted in special case --- mindspore/lite/src/lite_kernel.cc | 29 ++++++++++--------- mindspore/lite/src/lite_session.cc | 4 +++ mindspore/lite/src/tensor.h | 3 ++ .../lite/test/models_for_process_only.cfg | 1 - .../lite/test/models_with_multiple_inputs.cfg | 1 + mindspore/lite/test/run_benchmark_nets.sh | 20 ++++++++----- 6 files changed, 36 insertions(+), 22 deletions(-) diff --git a/mindspore/lite/src/lite_kernel.cc b/mindspore/lite/src/lite_kernel.cc index 78352aa43e..cf3e745043 100644 --- a/mindspore/lite/src/lite_kernel.cc +++ b/mindspore/lite/src/lite_kernel.cc @@ -315,23 +315,24 @@ std::vector LiteKernelUtil::SubgraphOutputTensors(const std::vec for (const auto &output_kernel : output_nodes) { auto &outer_out_kernels = output_kernel->out_kernels(); auto &out_kernel_out_tensors = output_kernel->out_tensors(); - if (outer_out_kernels.empty()) { - for (auto out_kernel_out_tensor : out_kernel_out_tensors) { + for (auto out_kernel_out_tensor : out_kernel_out_tensors) { + if (out_kernel_out_tensor->IsGraphOutput()) { output_tensors.insert(out_kernel_out_tensor); } - continue; } - for (auto outer_out_kernel : outer_out_kernels) { - auto iter = std::find(kernels.begin(), kernels.end(), outer_out_kernel); - if (iter != kernels.end()) { - continue; - } - auto &outer_out_kernel_in_tensors = outer_out_kernel->in_tensors(); - for (auto out_kernel_out_tensor : out_kernel_out_tensors) { - auto outer_out_kernel_in_tensors_iter = - std::find(outer_out_kernel_in_tensors.begin(), outer_out_kernel_in_tensors.end(), out_kernel_out_tensor); - if (outer_out_kernel_in_tensors_iter != outer_out_kernel_in_tensors.end()) { - output_tensors.insert(out_kernel_out_tensor); + if (!outer_out_kernels.empty()) { + for (auto outer_out_kernel : outer_out_kernels) { + auto iter = std::find(kernels.begin(), 
kernels.end(), outer_out_kernel); + if (iter != kernels.end()) { + continue; + } + auto &outer_out_kernel_in_tensors = outer_out_kernel->in_tensors(); + for (auto out_kernel_out_tensor : out_kernel_out_tensors) { + auto outer_out_kernel_in_tensors_iter = + std::find(outer_out_kernel_in_tensors.begin(), outer_out_kernel_in_tensors.end(), out_kernel_out_tensor); + if (outer_out_kernel_in_tensors_iter != outer_out_kernel_in_tensors.end()) { + output_tensors.insert(out_kernel_out_tensor); + } } } } diff --git a/mindspore/lite/src/lite_session.cc b/mindspore/lite/src/lite_session.cc index 732fc7a9a8..e8e678631c 100644 --- a/mindspore/lite/src/lite_session.cc +++ b/mindspore/lite/src/lite_session.cc @@ -176,6 +176,7 @@ int LiteSession::ConvertTensors(const lite::Model *model) { uint32_t tensor_count = model->all_tensors_.size(); MS_ASSERT(!model->sub_graphs_.empty()); auto model_input_indices = model->sub_graphs_.front()->input_indices_; + auto model_output_indices = model->sub_graphs_.front()->output_indices_; for (uint32_t i = 0; i < tensor_count; ++i) { auto *src_tensor = model->all_tensors_[i]; if (src_tensor == nullptr) { @@ -197,6 +198,9 @@ int LiteSession::ConvertTensors(const lite::Model *model) { if (IsContain(model_input_indices, i)) { dst_tensor->set_category(Tensor::GRAPH_INPUT); } + if (IsContain(model_output_indices, i)) { + dst_tensor->set_category(Tensor::GRAPH_OUTPUT); + } if (src_tensor->name() != nullptr) { dst_tensor->set_tensor_name(src_tensor->name()->str()); } diff --git a/mindspore/lite/src/tensor.h b/mindspore/lite/src/tensor.h index 544ebf8e20..050fd116e0 100644 --- a/mindspore/lite/src/tensor.h +++ b/mindspore/lite/src/tensor.h @@ -50,6 +50,7 @@ class Tensor : public mindspore::tensor::MSTensor { CONST_SCALAR, // weight scalar VAR, // activation tensor GRAPH_INPUT, + GRAPH_OUTPUT, }; Tensor() = default; @@ -163,6 +164,8 @@ class Tensor : public mindspore::tensor::MSTensor { bool IsGraphInput() const { return this->category_ == GRAPH_INPUT; } + 
bool IsGraphOutput() const { return this->category_ == GRAPH_OUTPUT; } + void Prepare() { if (allocator_ != nullptr) { data_ = allocator_->Prepare(data_); diff --git a/mindspore/lite/test/models_for_process_only.cfg b/mindspore/lite/test/models_for_process_only.cfg index 0eed3361c9..d0217c2761 100644 --- a/mindspore/lite/test/models_for_process_only.cfg +++ b/mindspore/lite/test/models_for_process_only.cfg @@ -27,4 +27,3 @@ quant_aware_bank_card_detection_inception.onnx quant_aware_bank_card_recognition_fcny.onnx quant_aware_identify_card_detect.onnx tiny-yolov3-11.onnx;2;1,416,416,3:1,2 -ml_video_edit_person_divison_pic;2 diff --git a/mindspore/lite/test/models_with_multiple_inputs.cfg b/mindspore/lite/test/models_with_multiple_inputs.cfg index fb03096d92..89ff1ae84e 100644 --- a/mindspore/lite/test/models_with_multiple_inputs.cfg +++ b/mindspore/lite/test/models_with_multiple_inputs.cfg @@ -9,3 +9,4 @@ ml_video_edit_video_segment_gauss_adaptis_part2.pb;2 ml_video_edit_video_segment_gauss_adaptis_part2_pb2tflite.tflite;2 decoder.onnx;2;1,7,512:1,7 fasterrcnn_crop.pb;1;420,630,3 +ml_video_edit_person_divison_pic;2 diff --git a/mindspore/lite/test/run_benchmark_nets.sh b/mindspore/lite/test/run_benchmark_nets.sh index 27421bcf45..c4dfb59f12 100755 --- a/mindspore/lite/test/run_benchmark_nets.sh +++ b/mindspore/lite/test/run_benchmark_nets.sh @@ -294,20 +294,26 @@ function Run_Converter() { tflite) model_fmk="TFLITE" ;; - caffemodel) - model_name=${model_name%.*} - model_fmk="CAFFE" - ;; onnx) model_fmk="ONNX" ;; mindir) model_fmk="MINDIR" ;; + *) + model_type="caffe" + model_fmk="CAFFE" + ;; esac - echo ${model_name} >> "${run_converter_log_file}" - echo './converter_lite --fmk='${model_fmk}' --modelFile='${models_path}'/'${model_name}' --outputFile='${ms_models_path}'/'${model_name} >> "${run_converter_log_file}" - ./converter_lite --fmk=${model_fmk} --modelFile=${models_path}/${model_name} --outputFile=${ms_models_path}/${model_name} + if [[ $model_fmk == "CAFFE" 
]]; then + echo ${model_name} >> "${run_converter_log_file}" + echo './converter_lite --fmk='${model_fmk}' --modelFile='$models_path/${model_name}'.prototxt --weightFile='$models_path'/'${model_name}'.caffemodel --outputFile='${ms_models_path}'/'${model_name} >> "${run_converter_log_file}" + ./converter_lite --fmk=${model_fmk} --modelFile=${models_path}/${model_name}.prototxt --weightFile=${models_path}/${model_name}.caffemodel --outputFile=${ms_models_path}/${model_name} + else + echo ${model_name} >> "${run_converter_log_file}" + echo './converter_lite --fmk='${model_fmk}' --modelFile='${models_path}'/'${model_name}' --outputFile='${ms_models_path}'/'${model_name} >> "${run_converter_log_file}" + ./converter_lite --fmk=${model_fmk} --modelFile=${models_path}/${model_name} --outputFile=${ms_models_path}/${model_name} + fi if [ $? = 0 ]; then converter_result='converter '${model_type}' '${model_name}' pass';echo ${converter_result} >> ${run_converter_result_file} else