diff --git a/mindspore/lite/test/codegen/models_tflite.cfg b/mindspore/lite/test/codegen/models_tflite.cfg
index a1ef6a36b2..92adcfb176 100644
--- a/mindspore/lite/test/codegen/models_tflite.cfg
+++ b/mindspore/lite/test/codegen/models_tflite.cfg
@@ -1 +1,171 @@
+hiai_model_0909_kd_rot_ps_softmax.tflite
+# hiai_chinese_english_recognize_model_float32.tflite
+# hiai_bigmodel_ghost_2_1_no_normalized_no_trans_tflite.tflite
+# hiai_bigmodel_ghost_5_1_no_normalized_no_trans_tflite.tflite
+# hiai_cn_recognize_modify_padv2.tflite
+# hiai_model_normalize_object_scene_ps_20200519.tflite
+# mtk_AADB_HADB_MBV2_model_fp32.tflite
+# mtk_AADB_HADB_MBV3_model_fp32.tflite
+mobilenet_v1_0.25_128.tflite
+mobilenet_v1_0.25_160.tflite
+mobilenet_v1_0.25_192.tflite
+mobilenet_v1_0.25_224.tflite
+mobilenet_v1_0.5_128.tflite
+mobilenet_v1_0.5_160.tflite
+mobilenet_v1_0.5_192.tflite
+mobilenet_v1_0.5_224.tflite
+mobilenet_v1_0.75_128.tflite
+mobilenet_v1_0.75_160.tflite
+mobilenet_v1_0.75_192.tflite
+mobilenet_v1_0.75_224.tflite
+mobilenet_v1_1.0_128.tflite
+mobilenet_v1_1.0_160.tflite
+mobilenet_v1_1.0_192.tflite
+mobilenet_v1_1.0_224.tflite
 mobilenet_v2_1.0_224.tflite
+# mtk_model_normalize_object_scene_ps_20200519_f32.tflite
+# mtk_model_ckpt.tflite
+mtk_age_gender.tflite
+# mtk_model_face_dress.tflite
+# mtk_face_features_v1.tflite
+# densenet.tflite
+squeezenet.tflite
+# resnet_v2_101_299.tflite
+# mnasnet_1.3_224.tflite
+inception_v3.tflite
+# deeplabv3_257_mv_gpu.tflite
+# multi_person_mobilenet_v1_075_float.tflite
+# hiai_vad.tflite
+# ide_label_base.tflite
+# ide_label_retrained.tflite
+ml_ei_headpose.tflite
+# ml_ei_landmark.tflite
+mnist.tflite
+mobilenet.tflite
+resnet.tflite
+scan_hms_angle1.tflite
+# scan_hms_detect.tflite
+# hiai_latin_ocr.tflite
+# hiai_latin_ocr_1.tflite
+# ml_ocr_jk.tflite
+# nasnet_mobile.tflite
+# nasnet_large.tflite
+# model_emotions_0727_nosoftmax.tflite
+# inception_resnet_v2.tflite
+# ml_ocr_latin.tflite
+# hiai_PoseEstimation_Pcm.tflite
+# hiai_ssd_mobilenetv2_object.tflite
+# hiai_cv_focusShootOCRModel_02.tflite
+# hiai_cv_poseEstimation.tflite
+inception_v4.tflite
+# mtk_model_normalize_object_scene_ps_20200519_f16.tflite
+# mtk_age_gender_fp16.tflite
+# mtk_model_face_dress_fp16.tflite
+mtk_AADB_HADB_MBV2_model_f16.tflite
+# mtk_AADB_HADB_MBV3_model_f16.tflite
+# mtk_model_emotions_0725_fp16.tflite
+# mtk_face_features_v1_fp16.tflite
+# siteAI_digcom_AI_ECN.tflite
+siteAI_digcom_g2v_keras.tflite
+siteAI_trans_nonlinear.tflite
+siteAI_trans_tcpclassify.tflite
+siteAI_wireless_depress_w.tflite
+siteAI_wireless_restore_w.tflite
+# magenta_arbitrary-image-stylization-v1-256_fp16_prediction_1.tflite
+# ml_object_detect.tflite
+# ml_object_detect_1.tflite
+hiai_cpu_face_emotion.tflite
+hiai_cpu_face_gazing.tflite
+# hiai_cpu_face_headpose.tflite
+# hiai_humanDetection.tflite
+# hiai_cv_focusShootOCRModel_08.tflite
+# ml_face_openclose.tflite
+# hiai_face_model_npu.tflite
+# hiai_ctpn_feature_map.tflite
+# hiai_cv_labelDetectorModel_v2.tflite
+hiai_cv_labelDetectorModel_v4.tflite
+# hiai_dress_detect.tflite
+# hiai_cv_saliencyDetectorModel.tflite
+# hiai_frozen_inference_graph.tflite
+# hiai_ghostnet.tflite
+# hiai_iMaxDN_RGB.tflite
+# hiai_iMaxSR_RGB.tflite
+hiai_label_and_video.tflite
+# hiai_lm_inference_graph.tflite
+efficientnet_lite0_fp32_2.tflite
+efficientnet_lite1_fp32_2.tflite
+efficientnet_lite2_fp32_2.tflite
+efficientnet_lite3_fp32_2.tflite
+efficientnet_lite4_fp32_2.tflite
+# mnasnet_0.50_224_1_metadata_1.tflite
+# mnasnet_0.75_224_1_metadata_1.tflite
+# mnasnet_1.0_128_1_metadata_1.tflite
+# mnasnet_1.0_160_1_metadata_1.tflite
+# mnasnet_1.0_192_1_metadata_1.tflite
+# mnasnet_1.0_224_1_metadata_1.tflite
+# mnasnet_1.0_96_1_metadata_1.tflite
+# lite-model_on_device_vision_classifier_popular_us_products_V1_1.tflite
+# lite-model_on_device_vision_classifier_popular_wine_V1_1.tflite
+# posenet_mobilenet_float_075_1_default_1.tflite
+# deeplabv3_1_default_1.tflite
+# lite-model_deeplabv3-mobilenetv2_dm05-float16_1_default_1.tflite
+# lite-model_deeplabv3-mobilenetv2-float16_1_default_1.tflite
+# lite-model_east-text-detector_fp16_1.tflite
+# lite-model_cartoongan_fp16_1.tflite
+# lite-model_arbitrary-image-stylization-inceptionv3_fp16_predict_1.tflite
+# gts_detect_5k_tf115.tflite
+# mtk_isface.tflite
+# mtk_landmark.tflite
+# mtk_new_detect.tflite
+# mtk_pose.tflite
+# mtk_model_emotions_0727_nosoftmax.tflite
+# mtk_model_normalize_object_scene_ps_20200826_f32_no_softmax.tflite
+# mtk_276landmark_0913.tflite
+# mtk_face_recognition.tflite
+# mtk_convert_model.tflite
+# smartreply.tflite
+# mindspore_text_classification_tflite.tflite
+# ml_location.tflite
+# ml_text_correction.tflite
+# ml_pic_shopping.tflite
+# ml_vision_guide_detection3_pb2tflite.tflite
+# ml_vision_guide_detection1_pb2tflite.tflite
+# ml_pic_shopping_pb2tflite.tflite
+# ml_ocr_jk_pb2tflite.tflite
+# ml_ocr_latin_pb2tflite.tflite
+# scan_hms_angle_pb2tflite.tflite
+# scan_hms_detect_pb2tflite.tflite
+# ml_location.tflite
+# ml_face_openclose_tflite.tflite
+# ml_object_detect_pb2tflite.tflite
+Q_AADB_HADB_MBV2_model.tflite
+# Q_convert.tflite
+# Q_crnn_ori_75w_slim_norm_pb2tflite.tflite
+# Q_crnn_ori_v2_405001_notrans_nopre_pb2tflite.tflite
+# Q_crnn_screen_slim400w_more_20w_pb2tflite.tflite
+# Q_dila-small-mix-full-fineturn-390000-nopixel-nosigmoid_tflite.tflite
+# Q_focusocr_cn_recog.tflite
+# Q_focusocr_jk_recog.tflite
+# Q_inception-249970-672-11-16_pb2tflite.tflite
+# Q_isface.tflite
+# Q_landmark.tflite
+# Q_language_model_hrmini_Q4_b4_17w.tflite
+# Q_new_detect.tflite
+# Q_object_scene.tflite
+# Q_pose.tflite
+# ml_ei_landmark_pb2tflite.tflite
+# unet_mbv2_05_104pts.tflite
+# hiai_AADB_HADB_MBV2_model_f16.tflite
+# hiai_AADB_HADB_MBV2_model_fp32.tflite
+# hiai_detect_curve_model_float32.tflite
+# hiai_detectmodel_06_23_960_480_1180700.tflite
+# hiai_detectmodel_desnet_256_128_64_32.tflite
+# lite-model_aiy_vision_classifier_food_V1_1.tflite
+# lite-model_disease-classification_1.tflite
+# lite-model_models_mushroom-identification_v1_1.tflite
+# lite-model_albert_lite_base_squadv1_metadata_1.tflite
+# lite-model_mobilebert_1_metadata_1.tflite
+# smartreply_1_default_1.tflite
+# text_classification.tflite
+# Q_detect_fpn_add_inception-1448650.tflite
+# Q_hand_0812_pb2tflite.tflite
diff --git a/mindspore/lite/test/codegen/run_benchmark_codegen.sh b/mindspore/lite/test/codegen/run_benchmark_codegen.sh
index 936128eb3a..d7bcae2669 100644
--- a/mindspore/lite/test/codegen/run_benchmark_codegen.sh
+++ b/mindspore/lite/test/codegen/run_benchmark_codegen.sh
@@ -1,5 +1,33 @@
 #!/bin/bash
 
+function Run_Converter() {
+    cd ${x86_path} || exit 1
+    tar -zxf mindspore-lite-${version}-inference-linux-x64.tar.gz || exit 1
+    cd ${x86_path}/mindspore-lite-${version}-inference-linux-x64/ || exit 1
+
+    cp tools/converter/converter/converter_lite ./ || exit 1
+    export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./tools/converter/lib/:./tools/converter/third_party/glog/lib
+
+    rm -rf ${ms_models_path}
+    mkdir -p ${ms_models_path}
+
+    # Convert tflite models:
+    while read line; do
+        model_name=${line}
+        if [[ $model_name == \#* ]]; then
+            continue
+        fi
+        echo ${model_name} >> "${run_converter_log_file}"
+        echo './converter_lite --fmk=TFLITE --modelFile='${models_path}'/'${model_name}' --outputFile='${ms_models_path}'/'${model_name}'' >> "${run_converter_log_file}"
+        ./converter_lite --fmk=TFLITE --modelFile=$models_path/${model_name} --outputFile=${ms_models_path}/${model_name}
+        if [ $? = 0 ]; then
+            converter_result='converter tflite '${model_name}' pass';echo ${converter_result} >> ${run_converter_result_file}
+        else
+            converter_result='converter tflite '${model_name}' failed';echo ${converter_result} >> ${run_converter_result_file};return 1
+        fi
+    done < ${models_tflite_config}
+}
+
 function Run_x86() {
     local CODEGEN_PATH=${x86_path}/mindspore-lite-${version}-inference-linux-x64/tools/codegen
 
@@ -64,8 +92,8 @@ function Print_Benchmark_Result() {
 basepath=$(pwd)
 echo ${basepath}
 
-# Example:sh run_benchmark_nets.sh -r /home/temp_test -m /home/temp_test/models -s /home/temp_test/ms_models -d "8KE5T19620002408"
-while getopts "r:m:e:s:" opt; do
+# Example:sh run_benchmark_nets.sh -r /home/temp_test -m /home/temp_test/models -d "8KE5T19620002408"
+while getopts "r:m:e:" opt; do
     case ${opt} in
         r)
            release_path=${OPTARG}
@@ -75,10 +103,6 @@ while getopts "r:m:e:s:" opt; do
            models_path=${OPTARG}
            echo "models_path is ${OPTARG}"
            ;;
-        s)
-           ms_models_path=${OPTARG}
-           echo "ms_models_path is ${OPTARG}"
-           ;;
        e)
            backend=${OPTARG}
            echo "backend is ${OPTARG}"
@@ -94,6 +118,7 @@ file_name=$(ls ${x86_path}/*inference-linux-x64.tar.gz)
 IFS="-" read -r -a file_name_array <<< "$file_name"
 version=${file_name_array[2]}
 
+ms_models_path=${basepath}/ms_models
 build_path=${basepath}/build
 models_tflite_config=${basepath}/models_tflite.cfg
 
@@ -107,6 +132,26 @@ echo ' ' > ${run_converter_result_file}
 run_x86_log_file=${basepath}/run_x86_log.txt
 echo 'run x86 logs: ' > ${run_x86_log_file}
 
+# Run converter
+echo "start Run converter ..."
+Run_Converter
+Run_converter_PID=$!
+sleep 1
+
+wait ${Run_converter_PID}
+Run_converter_status=$?
+
+# Check converter result and return value
+if [[ ${Run_converter_status} = 0 ]];then
+    echo "Run converter success"
+    Print_Converter_Result
+else
+    echo "Run converter failed"
+    cat ${run_converter_log_file}
+    Print_Converter_Result
+    exit 1
+fi
+
 # Write benchmark result to temp file
 run_benchmark_result_file=${basepath}/run_benchmark_result.txt
 echo ' ' > ${run_benchmark_result_file}
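
Note on the converter hookup in run_benchmark_codegen.sh: Run_Converter is invoked in the foreground, so $! (the PID of the most recent background job) is empty when Run_converter_PID is assigned, and wait with no operand returns 0. As far as I can tell, that means a "return 1" from the conversion loop is not reflected in Run_converter_status. A minimal sketch of the likely intent, reusing the script's own variable names, would background the call:

    # Sketch only: background Run_Converter so $! really is its PID
    # and `wait` propagates its exit status into Run_converter_status.
    Run_Converter &
    Run_converter_PID=$!
    sleep 1

    wait ${Run_converter_PID}
    Run_converter_status=$?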
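
Also note: the updated Example comment still mentions a -d option, but the new getopts string "r:m:e:" only accepts -r, -m, and -e, and ms_models_path is now fixed to ${basepath}/ms_models rather than taken from -s. A typical invocation under these assumptions (the paths and the backend value below are placeholders, not taken from this patch):

    # hypothetical usage; adjust paths and backend to your environment
    bash run_benchmark_codegen.sh -r /home/temp_test -m /home/temp_test/models -e x86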