
fix the bug that subgraph outputs are omitted in a special case

tags/v1.2.0-rc1
zengxianglong, 4 years ago
commit 2ef5d4a0f2
6 changed files with 36 additions and 22 deletions

  1. +15 -14  mindspore/lite/src/lite_kernel.cc
  2.  +4  -0  mindspore/lite/src/lite_session.cc
  3.  +3  -0  mindspore/lite/src/tensor.h
  4.  +0  -1  mindspore/lite/test/models_for_process_only.cfg
  5.  +1  -0  mindspore/lite/test/models_with_multiple_inputs.cfg
  6. +13  -7  mindspore/lite/test/run_benchmark_nets.sh

+15 -14  mindspore/lite/src/lite_kernel.cc

@@ -315,23 +315,24 @@ std::vector<lite::Tensor *> LiteKernelUtil::SubgraphOutputTensors(const std::vec
   for (const auto &output_kernel : output_nodes) {
     auto &outer_out_kernels = output_kernel->out_kernels();
     auto &out_kernel_out_tensors = output_kernel->out_tensors();
-    if (outer_out_kernels.empty()) {
-      for (auto out_kernel_out_tensor : out_kernel_out_tensors) {
+    for (auto out_kernel_out_tensor : out_kernel_out_tensors) {
+      if (out_kernel_out_tensor->IsGraphOutput()) {
         output_tensors.insert(out_kernel_out_tensor);
       }
-      continue;
     }
-    for (auto outer_out_kernel : outer_out_kernels) {
-      auto iter = std::find(kernels.begin(), kernels.end(), outer_out_kernel);
-      if (iter != kernels.end()) {
-        continue;
-      }
-      auto &outer_out_kernel_in_tensors = outer_out_kernel->in_tensors();
-      for (auto out_kernel_out_tensor : out_kernel_out_tensors) {
-        auto outer_out_kernel_in_tensors_iter =
-          std::find(outer_out_kernel_in_tensors.begin(), outer_out_kernel_in_tensors.end(), out_kernel_out_tensor);
-        if (outer_out_kernel_in_tensors_iter != outer_out_kernel_in_tensors.end()) {
-          output_tensors.insert(out_kernel_out_tensor);
+    if (!outer_out_kernels.empty()) {
+      for (auto outer_out_kernel : outer_out_kernels) {
+        auto iter = std::find(kernels.begin(), kernels.end(), outer_out_kernel);
+        if (iter != kernels.end()) {
+          continue;
+        }
+        auto &outer_out_kernel_in_tensors = outer_out_kernel->in_tensors();
+        for (auto out_kernel_out_tensor : out_kernel_out_tensors) {
+          auto outer_out_kernel_in_tensors_iter =
+            std::find(outer_out_kernel_in_tensors.begin(), outer_out_kernel_in_tensors.end(), out_kernel_out_tensor);
+          if (outer_out_kernel_in_tensors_iter != outer_out_kernel_in_tensors.end()) {
+            output_tensors.insert(out_kernel_out_tensor);
+          }
         }
       }
     }
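For readers who want to try the corrected collection logic in isolation, here is a minimal standalone sketch. TensorStub, KernelStub and SubgraphOutputs are hypothetical stand-ins rather than the real lite::Tensor / kernel::LiteKernel API; the loop mirrors the patched SubgraphOutputTensors: a tensor flagged as a graph output is always collected, even in the special case where all of its producer's consumers sit inside the same subgraph, which is what the old code dropped.

// Minimal standalone sketch (stand-in types, not the real MindSpore Lite classes).
#include <algorithm>
#include <iostream>
#include <set>
#include <vector>

struct TensorStub {
  const char *name;
  bool is_graph_output;  // plays the role of Tensor::IsGraphOutput()
};

struct KernelStub {
  std::vector<TensorStub *> in_tensors;
  std::vector<TensorStub *> out_tensors;
  std::vector<KernelStub *> out_kernels;  // kernels consuming this kernel's outputs
};

// Mirrors the patched collection logic: graph-output tensors are always kept,
// plus any tensor consumed by a kernel outside the given subgraph.
std::set<TensorStub *> SubgraphOutputs(const std::vector<KernelStub *> &kernels,
                                       const std::vector<KernelStub *> &output_nodes) {
  std::set<TensorStub *> outputs;
  for (auto *node : output_nodes) {
    for (auto *tensor : node->out_tensors) {
      if (tensor->is_graph_output) outputs.insert(tensor);
    }
    for (auto *consumer : node->out_kernels) {
      if (std::find(kernels.begin(), kernels.end(), consumer) != kernels.end()) {
        continue;  // consumer belongs to the same subgraph
      }
      for (auto *tensor : node->out_tensors) {
        if (std::find(consumer->in_tensors.begin(), consumer->in_tensors.end(), tensor) !=
            consumer->in_tensors.end()) {
          outputs.insert(tensor);
        }
      }
    }
  }
  return outputs;
}

int main() {
  // t0 is a declared graph output that is also consumed by k1 inside the same
  // subgraph; before the fix it was dropped because k0 has non-empty out_kernels.
  TensorStub t0{"t0", true};
  TensorStub t1{"t1", false};
  KernelStub k0, k1;
  k0.out_tensors = {&t0};
  k0.out_kernels = {&k1};
  k1.in_tensors = {&t0};
  k1.out_tensors = {&t1};
  for (auto *tensor : SubgraphOutputs({&k0, &k1}, {&k0})) {
    std::cout << tensor->name << std::endl;  // prints: t0
  }
  return 0;
}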


+4 -0  mindspore/lite/src/lite_session.cc

@@ -176,6 +176,7 @@ int LiteSession::ConvertTensors(const lite::Model *model) {
   uint32_t tensor_count = model->all_tensors_.size();
   MS_ASSERT(!model->sub_graphs_.empty());
   auto model_input_indices = model->sub_graphs_.front()->input_indices_;
+  auto model_output_indices = model->sub_graphs_.front()->output_indices_;
   for (uint32_t i = 0; i < tensor_count; ++i) {
     auto *src_tensor = model->all_tensors_[i];
     if (src_tensor == nullptr) {

@@ -197,6 +198,9 @@ int LiteSession::ConvertTensors(const lite::Model *model) {
     if (IsContain(model_input_indices, i)) {
       dst_tensor->set_category(Tensor::GRAPH_INPUT);
     }
+    if (IsContain(model_output_indices, i)) {
+      dst_tensor->set_category(Tensor::GRAPH_OUTPUT);
+    }
     if (src_tensor->name() != nullptr) {
       dst_tensor->set_tensor_name(src_tensor->name()->str());
     }
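To illustrate the tagging step added here, below is a hedged sketch under stand-in types: tensors whose index appears in the first subgraph's output_indices_ get the GRAPH_OUTPUT category while the model's tensors are converted. Category, TensorInfo, Contains and TagGraphBoundary are illustrative names, not the MindSpore Lite API.

// Simplified stand-ins; not the real lite::Tensor / IsContain API.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

enum class Category { VAR, GRAPH_INPUT, GRAPH_OUTPUT };

struct TensorInfo {
  uint32_t index;
  Category category;
};

// Same role as IsContain(): membership test on an index list.
bool Contains(const std::vector<uint32_t> &indices, uint32_t i) {
  return std::find(indices.begin(), indices.end(), i) != indices.end();
}

// Mirrors the added hunk: tag graph boundary tensors while converting them.
void TagGraphBoundary(std::vector<TensorInfo> *tensors,
                      const std::vector<uint32_t> &input_indices,
                      const std::vector<uint32_t> &output_indices) {
  for (auto &tensor : *tensors) {
    if (Contains(input_indices, tensor.index)) tensor.category = Category::GRAPH_INPUT;
    if (Contains(output_indices, tensor.index)) tensor.category = Category::GRAPH_OUTPUT;
  }
}

int main() {
  std::vector<TensorInfo> tensors = {{0, Category::VAR}, {1, Category::VAR}, {2, Category::VAR}};
  TagGraphBoundary(&tensors, /*input_indices=*/{0}, /*output_indices=*/{2});
  std::cout << (tensors[2].category == Category::GRAPH_OUTPUT) << std::endl;  // prints: 1
  return 0;
}

Because the category is a single field, a tensor whose index appears in both lists ends up as GRAPH_OUTPUT with the ordering used in this hunk.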


+3 -0  mindspore/lite/src/tensor.h

@@ -50,6 +50,7 @@ class Tensor : public mindspore::tensor::MSTensor {
     CONST_SCALAR,  // weight scalar
     VAR,           // activation tensor
     GRAPH_INPUT,
+    GRAPH_OUTPUT,
   };
   Tensor() = default;

@@ -163,6 +164,8 @@ class Tensor : public mindspore::tensor::MSTensor {
 
   bool IsGraphInput() const { return this->category_ == GRAPH_INPUT; }
 
+  bool IsGraphOutput() const { return this->category_ == GRAPH_OUTPUT; }
+
   void Prepare() {
     if (allocator_ != nullptr) {
       data_ = allocator_->Prepare(data_);
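As a usage note, the new predicate lets call sites such as the lite_kernel.cc hunk above ask IsGraphOutput() instead of comparing against the category enum directly. A rough, hypothetical stand-in for the pattern (not the real mindspore::lite::Tensor):

// Hypothetical stand-in illustrating the category/predicate pattern in tensor.h.
#include <iostream>

class TensorLike {
 public:
  enum Category { CONST_SCALAR, VAR, GRAPH_INPUT, GRAPH_OUTPUT };
  void set_category(Category c) { category_ = c; }
  bool IsGraphInput() const { return category_ == GRAPH_INPUT; }
  bool IsGraphOutput() const { return category_ == GRAPH_OUTPUT; }

 private:
  Category category_ = VAR;
};

int main() {
  TensorLike t;
  t.set_category(TensorLike::GRAPH_OUTPUT);
  std::cout << t.IsGraphOutput() << " " << t.IsGraphInput() << std::endl;  // prints: 1 0
  return 0;
}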


+0 -1  mindspore/lite/test/models_for_process_only.cfg

@@ -27,4 +27,3 @@ quant_aware_bank_card_detection_inception.onnx
 quant_aware_bank_card_recognition_fcny.onnx
 quant_aware_identify_card_detect.onnx
 tiny-yolov3-11.onnx;2;1,416,416,3:1,2
-ml_video_edit_person_divison_pic;2

+1 -0  mindspore/lite/test/models_with_multiple_inputs.cfg

@@ -9,3 +9,4 @@ ml_video_edit_video_segment_gauss_adaptis_part2.pb;2
 ml_video_edit_video_segment_gauss_adaptis_part2_pb2tflite.tflite;2
 decoder.onnx;2;1,7,512:1,7
 fasterrcnn_crop.pb;1;420,630,3
+ml_video_edit_person_divison_pic;2

+13 -7  mindspore/lite/test/run_benchmark_nets.sh

@@ -294,20 +294,26 @@ function Run_Converter() {
         tflite)
             model_fmk="TFLITE"
             ;;
-        caffemodel)
-            model_name=${model_name%.*}
-            model_fmk="CAFFE"
-            ;;
         onnx)
             model_fmk="ONNX"
             ;;
         mindir)
             model_fmk="MINDIR"
             ;;
+        *)
+            model_type="caffe"
+            model_fmk="CAFFE"
+            ;;
         esac
-        echo ${model_name} >> "${run_converter_log_file}"
-        echo './converter_lite --fmk='${model_fmk}' --modelFile='${models_path}'/'${model_name}' --outputFile='${ms_models_path}'/'${model_name} >> "${run_converter_log_file}"
-        ./converter_lite --fmk=${model_fmk} --modelFile=${models_path}/${model_name} --outputFile=${ms_models_path}/${model_name}
+        if [[ $model_fmk == "CAFFE" ]]; then
+            echo ${model_name} >> "${run_converter_log_file}"
+            echo './converter_lite --fmk='${model_fmk}' --modelFile='$models_path/${model_name}'.prototxt --weightFile='$models_path'/'${model_name}'.caffemodel --outputFile='${ms_models_path}'/'${model_name} >> "${run_converter_log_file}"
+            ./converter_lite --fmk=${model_fmk} --modelFile=${models_path}/${model_name}.prototxt --weightFile=${models_path}/${model_name}.caffemodel --outputFile=${ms_models_path}/${model_name}
+        else
+            echo ${model_name} >> "${run_converter_log_file}"
+            echo './converter_lite --fmk='${model_fmk}' --modelFile='${models_path}'/'${model_name}' --outputFile='${ms_models_path}'/'${model_name} >> "${run_converter_log_file}"
+            ./converter_lite --fmk=${model_fmk} --modelFile=${models_path}/${model_name} --outputFile=${ms_models_path}/${model_name}
+        fi
         if [ $? = 0 ]; then
             converter_result='converter '${model_type}' '${model_name}' pass';echo ${converter_result} >> ${run_converter_result_file}
         else

