!23792 [MS][LITE]add npu models + remove model in phone + cancel loop in script

Merge pull request !23792 from XianglongZeng/myms_new_3
i-robot 2021-09-26 02:51:37 +00:00 committed by Gitee
commit 24ebb966e9
10 changed files with 173 additions and 25 deletions

View File

@@ -77,7 +77,7 @@ ml_video_edit_img_segment_adaptise_pb2tflite.tflite;2 0.5
ml_video_edit_imitate_filter.onnx 200
hdc_mobilenet_1w_class.onnx 20
hdc_age_medium 504
-posenet_mobilenet_float_075_1_default_1.tflite 395
+posenet_mobilenet_float_075_1_default_1.tflite 14
nasnet_mobile.tflite 1
ml_video_edit_art_generate.onnx 0.5
ml_video_edit_art_transfer.onnx;3 3
@@ -99,3 +99,152 @@ ml_motion_capture_spin-res50-poolingnoceilmode;4 1
ml_video_edit_hair_dyeing_migrate_v2_fix.onnx;4 1.5
ml_motion_capture_yolov3-spp-deploy_ddk_prune 1
ml_video_edit_seg_320 0.5
+hiai_model_0909_kd_rot_ps_softmax.tflite 4
+hiai_chinese_english_recognize_model_float32.tflite 3
+hiai_bigmodel_ghost_2_1_no_normalized_no_trans_tflite.tflite 2
+hiai_bigmodel_ghost_5_1_no_normalized_no_trans_tflite.tflite 3
+hiai_cn_recognize_modify_padv2.tflite 5
+hiai_model_normalize_object_scene_ps_20200519.tflite 14
+mtk_AADB_HADB_MBV2_model_fp32.tflite 1.5
+#mtk_AADB_HADB_MBV3_model_fp32.tflite 2.5
+mtk_model_ckpt.tflite 5
+#mtk_age_gender.tflite
+#mtk_model_face_dress.tflite;1:input
+mtk_face_features_v1.tflite 8
+mnasnet_1.3_224.tflite;1:input 2
+deeplabv3_257_mv_gpu.tflite;1:sub_7 1
+multi_person_mobilenet_v1_075_float.tflite;1:sub_2 6
+ide_label_base.tflite;1:input 11
+#large precision bias error
+#ide_label_retrained.tflite;1:input_1
+#ml_ei_headpose.tflite;1:input_1
+#ml_ei_landmark.tflite;1:input_image
+mnist.tflite;1:conv2d_input 1.5
+#mobilenet.tflite;1:conv2d_input
+#resnet.tflite;1:input_1
+scan_hms_angle1.tflite;1:normalized_input_image_tensor 1.5
+scan_hms_detect.tflite;1:normalized_input_image_tensor 41
+hiai_latin_ocr.tflite;1:input_0 32
+hiai_latin_ocr_1.tflite;1:input_0 5.5
+#ml_ocr_jk.tflite;1:input_0
+#nasnet_mobile.tflite;1:input
+#nasnet_large.tflite;1:input
+#model_emotions_0727_nosoftmax.tflite;1:input
+#ml_ocr_latin.tflite;1:input_0
+hiai_PoseEstimation_Pcm.tflite;1:image 12
+#large precision bias error
+#hiai_ssd_mobilenetv2_object.tflite;1:image_tensor
+hiai_cv_focusShootOCRModel_02.tflite;1:input_0 4.5
+hiai_cv_poseEstimation.tflite;1:Image 37
+mtk_model_normalize_object_scene_ps_20200519_f16.tflite;1:input_0 3
+#mtk_age_gender_fp16.tflite;1:img
+#mtk_model_face_dress_fp16.tflite;1:img
+#mtk_AADB_HADB_MBV2_model_f16.tflite;1:input_0
+#mtk_AADB_HADB_MBV3_model_f16.tflite;1:input_0
+#mtk_model_emotions_0725_fp16.tflite;1:input
+mtk_face_features_v1_fp16.tflite;1:input 4
+#siteAI_digcom_AI_ECN.tflite;1:input_expansion
+siteAI_digcom_g2v_keras.tflite;1:conv2d_1_input 2
+#siteAI_trans_nonlinear.tflite;1:features_placeholder
+siteAI_trans_tcpclassify.tflite;1:conv2d_1_input 2.5
+#siteAI_wireless_depress_w.tflite;1:x-input
+#siteAI_wireless_restore_w.tflite;1:x-input
+#magenta_arbitrary-image-stylization-v1-256_fp16_prediction_1.tflite;1:style_image
+#ml_object_detect.tflite;1:input/input_data
+#ml_object_detect_1.tflite;1:input/input_data
+hiai_cpu_face_emotion.tflite;1:input_0 1.5
+#hiai_cpu_face_gazing.tflite;1:input_0
+hiai_cpu_face_headpose.tflite;1:input_0 1.5
+hiai_humanDetection.tflite;1:normalized_input_image_tensor 150
+hiai_cv_focusShootOCRModel_08.tflite;1:input 4
+#ml_face_openclose.tflite;1:input
+hiai_face_model_npu.tflite;1:input_0 3
+hiai_ctpn_feature_map.tflite;1:input_image 2
+hiai_cv_labelDetectorModel_v2.tflite;1:input_0 10
+hiai_cv_labelDetectorModel_v4.tflite;1:input_0 1
+hiai_dress_detect.tflite;1:data 1
+#hiai_cv_saliencyDetectorModel.tflite;1:image_tensor
+hiai_frozen_inference_graph.tflite;1:image_tensor 2.5
+#hiai_ghostnet.tflite;1:input
+#hiai_iMaxDN_RGB.tflite;1:input
+#hiai_iMaxSR_RGB.tflite;1:input
+hiai_label_and_video.tflite;1:input_0 4.5
+#hiai_lm_inference_graph.tflite;1:image_tensor
+mnasnet_0.50_224_1_metadata_1.tflite;1:input 3.5
+mnasnet_0.75_224_1_metadata_1.tflite;1:input 3
+mnasnet_1.0_128_1_metadata_1.tflite;1:input 2.5
+mnasnet_1.0_160_1_metadata_1.tflite;1:input 2
+mnasnet_1.0_192_1_metadata_1.tflite;1:input 2
+mnasnet_1.0_224_1_metadata_1.tflite;1:input 1.5
+mnasnet_1.0_96_1_metadata_1.tflite;1:input 1.5
+#lite-model_on_device_vision_classifier_popular_us_products_V1_1.tflite;1:uint8_image_input
+#lite-model_on_device_vision_classifier_popular_wine_V1_1.tflite;1:uint8_image_input
+#lite-model_deeplabv3-mobilenetv2_dm05-float16_1_default_1.tflite;1:sub_7
+#lite-model_deeplabv3-mobilenetv2-float16_1_default_1.tflite;1:sub_7
+lite-model_east-text-detector_fp16_1.tflite;1:input_images 460
+#lite-model_cartoongan_fp16_1.tflite;1:input_photo
+lite-model_arbitrary-image-stylization-inceptionv3_fp16_predict_1.tflite;1:style_image 1
+#gts_detect_5k_tf115.tflite;1:normalized_input_image_tensor
+#mtk_isface.tflite;1:data
+#mtk_landmark.tflite;1:img
+#mtk_new_detect.tflite;1:input
+#mtk_pose.tflite;1:input
+#mtk_model_emotions_0727_nosoftmax.tflite;1:input
+mtk_model_normalize_object_scene_ps_20200826_f32_no_softmax.tflite;1:input_0 32
+mtk_276landmark_0913.tflite;1:input 4
+#mtk_face_recognition.tflite;1:input
+#mtk_convert_model.tflite;1:data
+#smartreply.tflite;1:input_sentence
+mindspore_text_classification_tflite.tflite;1:base_input 3
+# ml_location.tflite
+#ml_text_correction.tflite;1:hed_input
+#ml_pic_shopping.tflite;1:images
+#ml_vision_guide_detection3_pb2tflite.tflite;1:input/input_data
+#ml_vision_guide_detection1_pb2tflite.tflite;1:input/input_data
+#ml_pic_shopping_pb2tflite.tflite;1:images
+#ml_ocr_jk_pb2tflite.tflite;1:input_0
+#ml_ocr_latin_pb2tflite.tflite;1:input_0
+scan_hms_angle_pb2tflite.tflite;1:normalized_input_image_tensor 2.5
+scan_hms_detect_pb2tflite.tflite;1:normalized_input_image_tensor 110
+#ml_location.tflite;1:inputs
+#ml_face_openclose_tflite.tflite;1:input
+#ml_object_detect_pb2tflite.tflite;1:input/input_data
+Q_AADB_HADB_MBV2_model.tflite;1:input_0 2.5
+#Q_convert.tflite;1:input
+#Q_crnn_ori_75w_slim_norm_pb2tflite.tflite;1:input_0
+#Q_crnn_ori_v2_405001_notrans_nopre_pb2tflite.tflite;1:input_0
+#Q_crnn_screen_slim400w_more_20w_pb2tflite.tflite;1:input_0
+Q_dila-small-mix-full-fineturn-390000-nopixel-nosigmoid_tflite.tflite;1:input 2
+Q_focusocr_cn_recog.tflite;1:input_0 6
+Q_focusocr_jk_recog.tflite;1:input_0 4.5
+Q_inception-249970-672-11-16_pb2tflite.tflite;1:input 3
+#Q_isface.tflite;1:data
+#Q_landmark.tflite;1:img
+Q_language_model_hrmini_Q4_b4_17w.tflite;1:input_0 51
+#Q_new_detect.tflite;1:input
+Q_object_scene.tflite;1:input_0 2.5
+Q_pose.tflite;1:input 1
+#ml_ei_landmark_pb2tflite.tflite;1:input_image
+unet_mbv2_05_104pts.tflite;1:input 4.5
+hiai_AADB_HADB_MBV2_model_f16.tflite;1:input_0 1
+hiai_AADB_HADB_MBV2_model_fp32.tflite;1:input_0 2.5
+#hiai_detect_curve_model_float32.tflite;1:input
+hiai_detectmodel_06_23_960_480_1180700.tflite;1:input 2.5
+hiai_detectmodel_desnet_256_128_64_32.tflite;1:input 13
+lite-model_aiy_vision_classifier_food_V1_1.tflite;1:input 15
+lite-model_disease-classification_1.tflite;1:mobilenetv2_1_00_224_input 30
+#lite-model_models_mushroom-identification_v1_1.tflite;1:input
+#smartreply_1_default_1.tflite;1:input_sentence
+#text_classification.tflite;1:embedding_input
+#Q_detect_fpn_add_inception-1448650.tflite;1:input
+Q_hand_0812_pb2tflite.tflite;1:input 8
+#bloom_landmark.tflite;1:img
+Q888_age_gender_orderd.tflite;1:input 5.5
+#Q888_face_dress_mv3y.tflite;1:input
+Q888_HADB_AADB_MBV2_model_fp32.tflite;1:input_0 1
+#Q888_landmark.tflite;1:img
+Q888_pose.tflite;1:input 1.5
+Q888_lapa158_unet_0924.tflite;1:input 4.5
+#Q888_isface.tflite;1:data
+#Q888_new_detect.tflite;1:input
+Q888_model_normalize_object_scene_ps_20200826_f32_no_softmax.tflite;1:input_0 1.5
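
Reviewer note: per the commit title, this block adds the NPU backlog of TFLite models. Each active entry appears to follow the pattern model_file[;input_num[:input_names]] accuracy_limit, where the trailing number is the accuracy tolerance the benchmark accepts for that model and a leading # parks a model that does not yet pass. A minimal parsing sketch under that assumption (the helper name is hypothetical, not from this PR):

    # parse_model_cfg_line: hypothetical helper illustrating the entry layout above.
    parse_model_cfg_line() {
        local line="$1"
        [[ ${line} =~ ^# ]] && return 0      # commented-out model: skipped by the runner
        local model_spec=${line% *}          # everything before the last space
        local accuracy_limit=${line##* }     # trailing accuracy tolerance
        echo "model_spec=${model_spec} accuracy_limit=${accuracy_limit}"
    }
    parse_model_cfg_line "mnasnet_1.3_224.tflite;1:input 2"
    # -> model_spec=mnasnet_1.3_224.tflite;1:input accuracy_limit=2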

View File

@@ -102,7 +102,7 @@ gender_resnet34_lzl.onnx;1:input.1
tiny-yolov3-11.onnx;2:input_1,image_shape;1,224,224,3:1,2 3
# cur acc for ml_video_edit_art_transfer is 2+%
ml_video_edit_art_transfer.onnx;3:input,sMatrix,sMean
-ssd-10.onnx;1:image;;;calib_only
+ssd-10.onnx;1:image
Q888_CV_face_recognition_self.onnx;1:input
ml_video_edit_dimming_tech_model_styleGan.onnx;2:0,lightFeature
ml_video_edit_hair_dyeing_migrate_v2_fix.onnx;4
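
Reviewer note: dropping ;;;calib_only here pairs with the Run_Benchmark change further down, which deletes the calib_only branch entirely: once loop benchmarking is no longer the arm64 CPU default, a per-model opt-out flag has nothing left to opt out of, and this entry was its user. A quick post-merge sanity check (the search path is an assumption, not from this diff):

    # Expect no remaining hits once this PR is applied (path assumed):
    grep -rn "calib_only" mindspore/lite/test/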

View File

@@ -92,12 +92,12 @@ tacotron_encoder_stf.pb;5:phones,tones,seg_tags,prosodies,input_length;1,62:1,62
female_model_step2_int16_noiseout.pb;66:cur_mel,noise_next,big_mel_c,upsample_net_conv_in_stack,upsample_net_layers_1_stack,upsample_net_layers_2_stack,upsample_net_layers_3_stack,conv_layers_0_stack,conv_layers_1_stack,conv_layers_2_stack,conv_layers_3_stack,conv_layers_4_stack,conv_layers_5_stack,conv_layers_6_stack,conv_layers_7_stack,conv_layers_8_stack,conv_layers_9_stack,conv_layers_10_stack,conv_layers_11_stack,conv_layers_12_stack,conv_layers_13_stack,conv_layers_14_stack,conv_layers_15_stack,conv_layers_16_stack,conv_layers_17_stack,conv_layers_18_stack,conv_layers_19_stack,conv_layers_20_stack,conv_layers_21_stack,conv_layers_22_stack,conv_layers_23_stack,conv_layers_24_stack,conv_layers_25_stack,conv_layers_26_stack,conv_layers_27_stack,conv_layers_28_stack,conv_layers_29_stack,h_0_stack,h_1_stack,h_2_stack,h_3_stack,h_4_stack,h_5_stack,h_6_stack,h_7_stack,h_8_stack,h_9_stack,h_10_stack,h_11_stack,h_12_stack,h_13_stack,h_14_stack,h_15_stack,h_16_stack,h_17_stack,h_18_stack,h_19_stack,h_20_stack,h_21_stack,h_22_stack,h_23_stack,h_24_stack,h_25_stack,h_26_stack,h_27_stack,h_28_stack
ml_female_model_step6_noiseout.pb;66:cur_mel,noise_next,big_mel_c,upsample_net_conv_in_stack,upsample_net_layers_1_stack,upsample_net_layers_2_stack,upsample_net_layers_3_stack,conv_layers_0_stack,conv_layers_1_stack,conv_layers_2_stack,conv_layers_3_stack,conv_layers_4_stack,conv_layers_5_stack,conv_layers_6_stack,conv_layers_7_stack,conv_layers_8_stack,conv_layers_9_stack,conv_layers_10_stack,conv_layers_11_stack,conv_layers_12_stack,conv_layers_13_stack,conv_layers_14_stack,conv_layers_15_stack,conv_layers_16_stack,conv_layers_17_stack,conv_layers_18_stack,conv_layers_19_stack,conv_layers_20_stack,conv_layers_21_stack,conv_layers_22_stack,conv_layers_23_stack,conv_layers_24_stack,conv_layers_25_stack,conv_layers_26_stack,conv_layers_27_stack,conv_layers_28_stack,conv_layers_29_stack,h_0_stack,h_1_stack,h_2_stack,h_3_stack,h_4_stack,h_5_stack,h_6_stack,h_7_stack,h_8_stack,h_9_stack,h_10_stack,h_11_stack,h_12_stack,h_13_stack,h_14_stack,h_15_stack,h_16_stack,h_17_stack,h_18_stack,h_19_stack,h_20_stack,h_21_stack,h_22_stack,h_23_stack,h_24_stack,h_25_stack,h_26_stack,h_27_stack,h_28_stack
ml_male_model_step6_noiseout.pb;66:cur_mel,noise_next,big_mel_c,upsample_net_conv_in_stack,upsample_net_layers_1_stack,upsample_net_layers_2_stack,upsample_net_layers_3_stack,conv_layers_0_stack,conv_layers_1_stack,conv_layers_2_stack,conv_layers_3_stack,conv_layers_4_stack,conv_layers_5_stack,conv_layers_6_stack,conv_layers_7_stack,conv_layers_8_stack,conv_layers_9_stack,conv_layers_10_stack,conv_layers_11_stack,conv_layers_12_stack,conv_layers_13_stack,conv_layers_14_stack,conv_layers_15_stack,conv_layers_16_stack,conv_layers_17_stack,conv_layers_18_stack,conv_layers_19_stack,conv_layers_20_stack,conv_layers_21_stack,conv_layers_22_stack,conv_layers_23_stack,conv_layers_24_stack,conv_layers_25_stack,conv_layers_26_stack,conv_layers_27_stack,conv_layers_28_stack,conv_layers_29_stack,h_0_stack,h_1_stack,h_2_stack,h_3_stack,h_4_stack,h_5_stack,h_6_stack,h_7_stack,h_8_stack,h_9_stack,h_10_stack,h_11_stack,h_12_stack,h_13_stack,h_14_stack,h_15_stack,h_16_stack,h_17_stack,h_18_stack,h_19_stack,h_20_stack,h_21_stack,h_22_stack,h_23_stack,h_24_stack,h_25_stack,h_26_stack,h_27_stack,h_28_stack
-ml_tts_decoder_control_flow.pb;5:h_1,c_1,h_0,decoder_inputs_array,c_0
+ml_tts_decoder_control_flow.pb;5:h_1,c_1,h_0,decoder_inputs_array,c_0;;;need_loop
ml_tts_decoder.pb;5:h_1,c_1,h_0,decoder_inputs_array,c_0
-ml_tts_encoder_control_flow.pb;4:phones,alpha,spk_id,input_length;1,22:1:1:1;;input_dependent
+ml_tts_encoder_control_flow.pb;4:phones,alpha,spk_id,input_length;1,22:1:1:1;;input_dependent+need_loop
ml_tts_vocoder.pb;66:cur_mel,noise_next,big_mel_c,upsample_net_conv_in_stack,upsample_net_layers_1_stack,upsample_net_layers_2_stack,upsample_net_layers_3_stack,conv_layers_0_stack,conv_layers_1_stack,conv_layers_2_stack,conv_layers_3_stack,conv_layers_4_stack,conv_layers_5_stack,conv_layers_6_stack,conv_layers_7_stack,conv_layers_8_stack,conv_layers_9_stack,conv_layers_10_stack,conv_layers_11_stack,conv_layers_12_stack,conv_layers_13_stack,conv_layers_14_stack,conv_layers_15_stack,conv_layers_16_stack,conv_layers_17_stack,conv_layers_18_stack,conv_layers_19_stack,conv_layers_20_stack,conv_layers_21_stack,conv_layers_22_stack,conv_layers_23_stack,conv_layers_24_stack,conv_layers_25_stack,conv_layers_26_stack,conv_layers_27_stack,conv_layers_28_stack,conv_layers_29_stack,h_0_stack,h_1_stack,h_2_stack,h_3_stack,h_4_stack,h_5_stack,h_6_stack,h_7_stack,h_8_stack,h_9_stack,h_10_stack,h_11_stack,h_12_stack,h_13_stack,h_14_stack,h_15_stack,h_16_stack,h_17_stack,h_18_stack,h_19_stack,h_20_stack,h_21_stack,h_22_stack,h_23_stack,h_24_stack,h_25_stack,h_26_stack,h_27_stack,h_28_stack
hiai_nlu_model.pb;3:input_ids,input_mask,segment_ids;1,16:1,16:1,16
-gts_object_detect_Ics.pb;1:image;420,630,3;;input_dependent
+gts_object_detect_Ics.pb;1:image;420,630,3;;input_dependent+need_loop
hiai_transformer_encoder.pb;15:buffer_in_0,buffer_in_1,buffer_in_2,buffer_in_3,buffer_in_4,buffer_in_5,buffer_in_6,buffer_in_7,buffer_in_8,buffer_in_9,buffer_in_10,buffer_in_11,buffer_in_12,buffer_in_13,encoder_in_deploy
decoder_step_nocumsum_v5.pb;13:h_1,h_2,c_2,c_1,c_0,dec_lr_inputs,dec_lr_posmat,dec_ref_frames,time_step,dec_lr_sigma,h_0,previous_output,dec_lr_dend;1,512:1,512:1,512:1,512:1,512:1,127,320:1,1429,2:1,127:1:1,127:1,512:1,80:1,127
ml_audio_kit_encoder_v5.pb;6:input_length,seg_tags,prosodies,phones,alpha,tones;1:1,32:1,32:1,32:1:1,32
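
Reviewer note: in these entries the semicolon-separated fields after the model name are the input count (with optional :names), the input shapes, a fourth field that stays empty here, and extra_info; multiple extra_info flags are chained with +, as in input_dependent+need_loop. A hedged sketch of splitting one entry (variable names illustrative):

    # Illustrative only: split a config entry into its five fields.
    line='gts_object_detect_Ics.pb;1:image;420,630,3;;input_dependent+need_loop'
    IFS=';' read -r model_name input_info input_shapes field4 extra_info <<< "${line}"
    if [[ ${extra_info} =~ "need_loop" ]]; then
        echo "${model_name}: also run loop benchmarking on arm64"
    fi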

View File

@@ -80,8 +80,8 @@ ml_video_edit_oneclick_adaptis.pb;3 6
#encoder_0111.pb;4;1:1,44:1:1
ml_female_model_step6_noiseout.pb;66 2
ml_male_model_step6_noiseout.pb;66 2.5
-ml_tts_encoder_control_flow.pb;4;1,22:1:1:1 1.5
-ml_tts_decoder_control_flow.pb;5 1
+ml_tts_encoder_control_flow.pb;4;1,22:1:1:1;;input_dependent+need_loop 1.5
+ml_tts_decoder_control_flow.pb;5;;;need_loop 1
ml_tts_decoder.pb;5 2.5
ml_tts_vocoder.pb;66 53
hiai_transformer_encoder.pb;15 4

View File

@@ -151,9 +151,6 @@ function Run_Benchmark() {
echo "Benchmarking ${model_name} $6 $7 ......"
# adjust benchmark mode
benchmark_mode="calib"
if [[ $6 == "arm64" && $7 == "CPU" && ! ${cfg_file_name} =~ "fp16" ]]; then
benchmark_mode="calib+loop"
fi
# adjust precision mode
mode="fp32"
if [[ ${cfg_file_name} =~ "fp16" ]]; then
@@ -164,7 +161,6 @@ function Run_Benchmark() {
if [[ ${cfg_file_name} =~ "weightquant" ]]; then
infix="_${cfg_file##*_}"
infix=${infix%.*}
benchmark_mode="calib"
elif [[ ${cfg_file_name} =~ "_train" ]]; then
infix="_train"
elif [[ ${cfg_file_name} =~ "_posttraining" ]]; then
@@ -207,8 +203,8 @@ function Run_Benchmark() {
if [[ ${mode} == "fp16" ]]; then
enableFp16="true"
fi
if [[ ${extra_info} =~ "calib_only" ]]; then
benchmark_mode="calib"
if [[ $6 == "arm64" && ${extra_info} =~ "need_loop" ]]; then
benchmark_mode="calib+loop"
fi
# start running benchmark
echo "---------------------------------------------------------" >> "$4"
@@ -267,14 +263,14 @@ function Run_Benchmark() {
# Print start msg before run testcase
function MS_PRINT_TESTCASE_START_MSG() {
echo ""
echo -e "-----------------------------------------------------------------------------------------------------------------------------------"
echo -e "env Testcase Result "
echo -e "--- -------- ------ "
echo -e "----------------------------------------------------------------------------------------------------------------------------------------"
echo -e "env Testcase Result "
echo -e "--- -------- ------ "
}
# Print start msg after run testcase
function MS_PRINT_TESTCASE_END_MSG() {
echo -e "-----------------------------------------------------------------------------------------------------------------------------------"
echo -e "----------------------------------------------------------------------------------------------------------------------------------------"
}
function Print_Converter_Result() {
@@ -290,7 +286,7 @@ function Print_Benchmark_Result() {
MS_PRINT_TESTCASE_START_MSG
while read line; do
arr=("${line}")
printf "%-20s %-100s %-7s\n" ${arr[0]} ${arr[1]} ${arr[2]}
printf "%-25s %-100s %-7s\n" ${arr[0]} ${arr[1]} ${arr[2]}
done < $1
MS_PRINT_TESTCASE_END_MSG
}
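
Reviewer note: widening the env column from %-20s to %-25s keeps rows aligned for longer environment labels, and the dash rules and header row in the two print functions above grow by the same five characters. For example (sample values, not from an actual run log):

    # Rows stay aligned even when the env label exceeds 20 characters:
    printf "%-25s %-100s %-7s\n" "x86_avx" "mnasnet_1.3_224.tflite" "pass"
    printf "%-25s %-100s %-7s\n" "arm64_mindrt_parallel" "mnist.tflite" "pass"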

View File

@@ -160,4 +160,5 @@ fi
echo "Run_arm32_fp32 and Run_armv82_a32_fp16 is ended"
Print_Benchmark_Result $run_benchmark_result_file
adb -s ${device_id} shell "rm -rf /data/local/tmp/benchmark_test/*"
+exit ${isFailed}

View File

@@ -196,4 +196,5 @@ fi
echo "Run_arm64_fp32 and Run_arm64_fp16 is ended"
Print_Benchmark_Result $run_benchmark_result_file
adb -s ${device_id} shell "rm -rf /data/local/tmp/benchmark_test/*"
+exit ${isFailed}

View File

@@ -231,4 +231,5 @@ fi
echo "Run_gpu and Run_cropper and mindrt_parallel is ended"
Print_Benchmark_Result $run_benchmark_result_file
adb -s ${device_id} shell "rm -rf /data/local/tmp/benchmark_test/*"
+exit ${isFailed}

View File

@@ -138,4 +138,5 @@ fi
echo "Run_npu ended"
Print_Benchmark_Result $run_benchmark_result_file
adb -s ${device_id} shell "rm -rf /data/local/tmp/benchmark_test/*"
+exit ${isFailed}
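
Reviewer note: all four run scripts (arm32, arm64, gpu/cropper/mindrt_parallel, npu) now finish with exit ${isFailed}, so a benchmark failure reaches the CI job instead of being masked by the trailing adb cleanup, whose exit status previously became the script's. The surrounding pattern is presumably along these lines (hypothetical condensation, not the scripts' literal text):

    isFailed=0
    Run_npu || isFailed=1                # each stage raises the failure flag
    Print_Benchmark_Result $run_benchmark_result_file
    adb -s ${device_id} shell "rm -rf /data/local/tmp/benchmark_test/*"   # cleanup always runs
    exit ${isFailed}                     # CI now sees the real status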

View File

@@ -121,8 +121,8 @@ function Run_x86() {
# Prepare the config file list
local x86_cfg_file_list=("$models_tf_config" "$models_tflite_config" "$models_caffe_config" "$models_onnx_config" "$models_mindspore_config" \
"$models_mindspore_train_config" "$models_posttraining_config" "$models_process_only_fp16_config" \
"$models_tflite_awaretraining_config" "$models_weightquant_0bit_config" "$models_weightquant_8bit_config" "$models_weightquant_7bit_config" \
"$models_mindspore_train_config" "$models_posttraining_config" "$models_tflite_awaretraining_config" \
"$models_weightquant_0bit_config" "$models_weightquant_8bit_config" "$models_weightquant_7bit_config" \
"$models_weightquant_9bit_config" "$models_process_only_config")
# Run converted models:
# $1:cfgFileList; $2:modelPath; $3:dataPath; $4:logFile; $5:resultFile; $6:platform; $7:processor; $8:phoneId;
@@ -139,8 +139,8 @@ function Run_x86_sse() {
# Prepare the config file list
local sse_cfg_file_list=("$models_tf_config" "$models_tflite_config" "$models_caffe_config" "$models_onnx_config" "$models_mindspore_config" \
"$models_mindspore_train_config" "$models_posttraining_config" "$models_process_only_fp16_config" \
"$models_tflite_awaretraining_config" "$models_weightquant_0bit_config" "$models_weightquant_8bit_config" "$models_weightquant_7bit_config" \
"$models_mindspore_train_config" "$models_posttraining_config" "$models_tflite_awaretraining_config" \
"$models_weightquant_0bit_config" "$models_weightquant_8bit_config" "$models_weightquant_7bit_config" \
"$models_weightquant_9bit_config" "$models_process_only_config")
# Run converted models:
# $1:cfgFileList; $2:modelPath; $3:dataPath; $4:logFile; $5:resultFile; $6:platform; $7:processor; $8:phoneId;
@@ -157,8 +157,8 @@ function Run_x86_avx() {
# Prepare the config file list
local avx_cfg_file_list=("$models_tf_config" "$models_tflite_config" "$models_caffe_config" "$models_onnx_config" "$models_mindspore_config" \
"$models_mindspore_train_config" "$models_posttraining_config" "$models_process_only_fp16_config" \
"$models_tflite_awaretraining_config" "$models_weightquant_0bit_config" "$models_weightquant_8bit_config" "$models_weightquant_7bit_config" \
"$models_mindspore_train_config" "$models_posttraining_config" "$models_tflite_awaretraining_config" \
"$models_weightquant_0bit_config" "$models_weightquant_8bit_config" "$models_weightquant_7bit_config" \
"$models_weightquant_9bit_config" "$models_process_only_config")
# Run converted models:
# $1:cfgFileList; $2:modelPath; $3:dataPath; $4:logFile; $5:resultFile; $6:platform; $7:processor; $8:phoneId; $9:benchmark_mode
@@ -251,7 +251,6 @@ models_weightquant_7bit_config=${basepath}/../config/models_weightquant_7bit.cfg
models_weightquant_9bit_config=${basepath}/../config/models_weightquant_9bit.cfg
models_weightquant_8bit_config=${basepath}/../config/models_weightquant_8bit.cfg
models_process_only_config=${basepath}/../config/models_process_only.cfg
-models_process_only_fp16_config=${basepath}/../config/models_process_only_fp16.cfg
ms_models_path=${basepath}/ms_models
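
Reviewer note: with models_process_only_fp16_config removed from all three x86 config lists and its definition deleted here, x86 runs no longer attempt the fp16 process-only models at all. Judging by the repeated "$1:cfgFileList; ..." parameter comments above, each list is consumed roughly like this (hypothetical sketch, not the script's literal code):

    # Iterate a cfg file list and name the models that would be benchmarked.
    for cfg_file in ${x86_cfg_file_list[*]}; do
        [[ -f ${cfg_file} ]] || continue                    # skip absent configs
        while read -r line; do
            [[ -z ${line} || ${line} =~ ^# ]] && continue   # skip blanks and parked models
            echo "would benchmark: ${line%% *}"
        done < "${cfg_file}"
    done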