diff --git a/mindspore/lite/src/ops/populate/deconv2d_populate.cc b/mindspore/lite/src/ops/populate/deconv2d_populate.cc
index 6abd355e93..96ffb97e9c 100644
--- a/mindspore/lite/src/ops/populate/deconv2d_populate.cc
+++ b/mindspore/lite/src/ops/populate/deconv2d_populate.cc
@@ -46,6 +46,7 @@ OpParameter *PopulateDeconvParameter(const mindspore::lite::PrimitiveC *primitiv
   conv_param->pad_r_ = deconv_lite_primitive->PadRight();
   conv_param->dilation_h_ = conv_primitive->GetDilateH();
   conv_param->dilation_w_ = conv_primitive->GetDilateW();
+  conv_param->group_ = conv_primitive->GetGroup();
   auto act_type = conv_primitive->GetActivationType();
   switch (act_type) {
     case schema::ActivationType_RELU:
diff --git a/mindspore/lite/src/runtime/kernel/npu/deconvolution_npu.cc b/mindspore/lite/src/runtime/kernel/npu/deconvolution_npu.cc
index 524732ff54..ac15301345 100644
--- a/mindspore/lite/src/runtime/kernel/npu/deconvolution_npu.cc
+++ b/mindspore/lite/src/runtime/kernel/npu/deconvolution_npu.cc
@@ -24,13 +24,17 @@ using mindspore::schema::PrimitiveType_DeConv2D;
 namespace mindspore::kernel {
 int DeconvolutionNPUKernel::IsSupport(const std::vector<lite::Tensor *> &inputs,
                                       const std::vector<lite::Tensor *> &outputs, OpParameter *opParameter) {
+  if (conv_param_->group_ != 1) {
+    MS_LOG(WARNING) << "Only support group equals 1 for npu deconvolution op";
+    return RET_ERROR;
+  }
   return RET_OK;
 }
 
 int DeconvolutionNPUKernel::SetConvParam() {
   deconv_->set_attr_strides(ge::AttrValue::LIST_INT({conv_param_->stride_h_, conv_param_->stride_w_}));
   deconv_->set_attr_dilations(ge::AttrValue::LIST_INT({conv_param_->dilation_h_, conv_param_->dilation_w_}));
-  deconv_->set_attr_groups(1);
+  deconv_->set_attr_groups(conv_param_->group_);
 
   if (conv_param_->pad_mode_ == Pad_Same) {
     deconv_->set_attr_pad_mode(ge::AttrValue::STR{"SAME"});
diff --git a/mindspore/lite/test/models_npu.cfg b/mindspore/lite/test/models_npu.cfg
index 032c4861f6..0d5d1e2289 100644
--- a/mindspore/lite/test/models_npu.cfg
+++ b/mindspore/lite/test/models_npu.cfg
@@ -1,3 +1,4 @@
 mobilenet_v2_1.0_224.tflite 2.5
 squeezenet.tflite 2.5
 inception_v3.tflite 1
+porseg_tmp.onnx 1 2
diff --git a/mindspore/lite/test/models_with_several_inputs_or_without_outputs.cfg b/mindspore/lite/test/models_with_several_inputs_or_without_outputs.cfg
index 05da988dd3..24ca2212b7 100644
--- a/mindspore/lite/test/models_with_several_inputs_or_without_outputs.cfg
+++ b/mindspore/lite/test/models_with_several_inputs_or_without_outputs.cfg
@@ -30,3 +30,4 @@ ml_ei_facedetection.onnx
 quant_aware_bank_card_detection_inception.onnx
 quant_aware_bank_card_recognition_fcny.onnx
 quant_aware_identify_card_detect.onnx
+porseg_tmp.onnx;2
diff --git a/mindspore/lite/test/run_benchmark_nets.sh b/mindspore/lite/test/run_benchmark_nets.sh
index cd42456882..3d1d1e07d2 100644
--- a/mindspore/lite/test/run_benchmark_nets.sh
+++ b/mindspore/lite/test/run_benchmark_nets.sh
@@ -1353,10 +1353,21 @@ function Run_arm64() {
     while read line; do
         model_name=`echo ${line}|awk -F ' ' '{print $1}'`
         accuracy_limit=`echo ${line}|awk -F ' ' '{print $2}'`
+        input_num=`echo ${line}|awk -F ' ' '{print $3}'`
+        data_path="/data/local/tmp/input_output/"
+        input_files=''
+        if [[ -z "$input_num" || $input_num == 1 ]]; then
+            input_files=${data_path}'input/'$model_name'.ms.bin'
+        elif [[ ! -z "$input_num" && $input_num -gt 1 ]]; then
+            for i in $(seq 1 $input_num)
+            do
+                input_files=$input_files${data_path}'input/'$model_name'.ms.bin_'$i','
+            done
+        fi
         echo "mindspore run npu: ${model_name}, accuracy limit:${accuracy_limit}" >> "${run_arm64_log_file}"
         echo 'cd /data/local/tmp/benchmark_test' > adb_run_cmd.txt
-        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --device=NPU --modelFile='${model_name}'.ms --inDataFile=/data/local/tmp/input_output/input/'${model_name}'.ms.bin --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out --accuracyThreshold='${accuracy_limit} >> "${run_arm64_log_file}"
-        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --device=NPU --modelFile='${model_name}'.ms --inDataFile=/data/local/tmp/input_output/input/'${model_name}'.ms.bin --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out --accuracyThreshold='${accuracy_limit} >> adb_run_cmd.txt
+        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --device=NPU --modelFile='${model_name}'.ms --inDataFile='${input_files}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out --accuracyThreshold='${accuracy_limit} >> "${run_arm64_log_file}"
+        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --device=NPU --modelFile='${model_name}'.ms --inDataFile='${input_files}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out --accuracyThreshold='${accuracy_limit} >> adb_run_cmd.txt
         adb -s ${device_id} shell < adb_run_cmd.txt >> "${run_arm64_log_file}"
         if [ $? = 0 ]; then
             run_result='arm64_npu: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
@@ -1377,7 +1388,8 @@ function Run_arm64() {
         input_files=''
         output_file=''
         data_path="/data/local/tmp/input_output/"
-        if [[ -z "$input_num" || $input_num == 1 ]] && [ -e ${data_path}'input/'${model_name}'.ms.bin' ]; then
+        model_x86_path="/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/"
+        if [[ -z "$input_num" || $input_num == 1 ]] && [ -e ${model_x86_path}'input/'${model_name}'.ms.bin' ]; then
             input_files=${data_path}'input/'$model_name'.ms.bin'
         elif [[ ! -z "$input_num" && $input_num -gt 1 ]]; then
             for i in $(seq 1 $input_num)
@@ -1385,7 +1397,7 @@ function Run_arm64() {
                 input_files=$input_files${data_path}'input/'$model_name'.ms.bin_'$i','
             done
         fi
-        if [ -e ${data_path}'output/'${model_name}'.ms.out' ]; then
+        if [ -e ${model_x86_path}'output/'${model_name}'.ms.out' ]; then
             output_file=${data_path}'output/'${model_name}'.ms.out'
         fi
         if [[ ${model_name##*.} == "caffemodel" ]]; then