add models to the entrance guard and modify the TF script
This commit is contained in:
parent d17b2c98e3
commit 8cf4f102ce

@@ -108,4 +108,11 @@ ml_hand_3d_regression
ml_ARengine23_bodypose
ml_ocr_bank_card_detection_inception
ml_ocr_bank_card_recognition_fcny
hiai_cv_aestheticsEngineModel_osp
hiai_cv_aestheticsEngineModel_osp
bank_card_recognition_fcny
bank_card_detection_inception
ml_ocr_identify_card_fcny
ml_ocr_identify_card_detect
identify_card_detect
ml_2012_ocr_rec_caffe
ml_2012_ocr_detection_caffe

@@ -103,4 +103,12 @@ ml_hand_3d_regression 3
ml_ARengine23_bodypose 56
ml_ocr_bank_card_detection_inception 20
ml_ocr_bank_card_recognition_fcny 0.5
hiai_cv_aestheticsEngineModel_osp 1.1
hiai_cv_aestheticsEngineModel_osp 1.5
ml_face_hat 0.5
bank_card_recognition_fcny 17
bank_card_detection_inception 12
ml_ocr_identify_card_fcny 0.5
ml_ocr_identify_card_detect 2
identify_card_detect 0.5
ml_2012_ocr_detection_caffe 1
ml_2012_ocr_rec_caffe 0.5
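
Each entry above pairs a model with its maximum allowed accuracy error for the guard run. A minimal sketch of how such 'model_name accuracy_limit' lines can be consumed, mirroring the awk parsing used by the run scripts later in this commit (the config-file variable and the echo are illustrative assumptions only):

# Sketch: iterate guard entries of the form 'model_name accuracy_limit'.
while read line; do
    if [[ $line == \#* ]]; then
        continue                                            # skip commented-out models
    fi
    model_name=`echo ${line}|awk -F ' ' '{print $1}'`       # e.g. 'ml_ocr_bank_card_recognition_fcny'
    accuracy_limit=`echo ${line}|awk -F ' ' '{print $2}'`   # e.g. '0.5' (maximum tolerated error)
    echo "guard: ${model_name} must stay within ${accuracy_limit}"
done < ${models_accuracy_config}                            # hypothetical config-file variable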

@@ -71,3 +71,4 @@ mtk_detect_mbv1_640_480_nopostprocess_simplified_onnx.onnx;1,480,640,3
mtk_face_features_v2.onnx;1,256,192,3
mtk_face_recognition_v3.onnx
mtk_face_recognition_v2.onnx
ml_2012_ocr_detection.onnx

@@ -60,14 +60,15 @@ ml_location_lane_counter.onnx 6
#The encoder and decoder models are used in the ml_asr scene; both have value overflow. Not suitable for fp16.
#But added for guarding the process.
encoder.onnx;1,32,83 1262
mtk_emotions-d2012-75.onnx 4.73
mtk_detect-mbv1-shortcut-400-400.onnx 0.042
mtk_detect-mbv2-shortcut-400-400.onnx 0.1
mtk_detect_mbv1_640_480.onnx 0.07
mtk_detect-deeper-halfdeeper-mbv1-shortcut-400-400_nopostprocess_simplified_onnx.onnx 1.52
mtk_detect-mbv1-shortcut-400-400_nopostprocess_simplified_onnx.onnx 6.2
mtk_detect-deeper-halfdeeper-mbv1-lastearlySSD-shortcut-400-400_nopostprocess_simplified_onnx.onnx 2.2
mtk_detect_mbv1_640_480_nopostprocess_simplified_onnx.onnx;1,480,640,3 1.8
mtk_face_features_v2.onnx;1,256,192,3 0.04
mtk_face_recognition_v3.onnx 0.21
mtk_face_recognition_v2.onnx 2.45
mtk_emotions-d2012-75.onnx 5
mtk_detect-mbv1-shortcut-400-400.onnx 0.5
mtk_detect-mbv2-shortcut-400-400.onnx 0.5
mtk_detect_mbv1_640_480.onnx 0.5
mtk_detect-deeper-halfdeeper-mbv1-shortcut-400-400_nopostprocess_simplified_onnx.onnx 2
mtk_detect-mbv1-shortcut-400-400_nopostprocess_simplified_onnx.onnx 6.5
mtk_detect-deeper-halfdeeper-mbv1-lastearlySSD-shortcut-400-400_nopostprocess_simplified_onnx.onnx 2.5
mtk_detect_mbv1_640_480_nopostprocess_simplified_onnx.onnx;1,480,640,3 2
mtk_face_features_v2.onnx;1,256,192,3 0.5
mtk_face_recognition_v3.onnx 0.5
mtk_face_recognition_v2.onnx 2.5
ml_2012_ocr_detection.onnx 0.5
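
Entries carrying a ';' suffix (for example mtk_face_features_v2.onnx;1,256,192,3) pin the input shape, which the run scripts forward via --inputShapes. A worked example of the split the scripts below perform on such a name:

# Sketch: separate model name and input shapes the way the run scripts do.
entry='mtk_face_features_v2.onnx;1,256,192,3'
model_name=${entry%;*}            # 'mtk_face_features_v2.onnx'
length=${#model_name}
input_shapes=${entry:length+1}    # '1,256,192,3'
echo "--modelFile=${model_name}.ms --inputShapes=${input_shapes}"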

@@ -1,66 +1,59 @@
decoder_step_201217.pb 5
decoder_step_201217_modified.pb 5
unet_model_reconstruct.pb 1;1,256,256,3
encoder_201228.pb 3;1:1,22:1
female_model_step2_int16_noiseout.pb 66
encoder_0111_control_flow.pb 4;1:1,44:1:1
encoder_0111.pb 4;1:1,44:1:1
ml_video_edit_generate_filter.pb 1
densenet.pb 1;1,224,224,3
inception_resnet_v2.pb 1;1,299,299,3
inception_v3.pb 1;1,299,299,3
inception_v4.pb 1;1,299,299,3
mnasnet_1.0_224.pb 1
mnasnet_1.3_224.pb 1
mobilenet_v1_0.25_128_frozen.pb 1;1,128,128,3
mobilenet_v2_1.0_224_frozen.pb 1;1,224,224,3
nasnet_large.pb 1;1,331,331,3
nasnet_mobile.pb 1;1,224,224,3
squeezenet.pb 1;1,224,224,3
ml_ei_headpose.pb 1;1,64,64,3
ml_ei_landmark.pb 1;1,160,160,3
ml_face_openclose.pb 1;1,32,32,3
ml_object_detect.pb 1;1,288,288,3
ml_ocr_jk.pb 1
ml_video_edit_enhance.pb 1
ml_vision_guide_detection1.pb 1
ml_vision_guide_detection3.pb 1
scan_hms_angle.pb 1
scan_hms_detect.pb 1
hiai_AADB_HADB_MBV2_model.pb 1;1,224,224,3
hiai_cn_recognize_modify_padv2.pb 1;1,32,512,1
hiai_cpu_face_emotion.pb 1
hiai_cpu_face_gazing.pb 1
hiai_cpu_face_headpose.pb 1
hiai_ctpn_feature_map.pb 1
hiai_cv_focusShootOCRModel_02.pb 1
hiai_cv_focusShootOCRModel_08.pb 1
hiai_cv_poseEstimation.pb 1
hiai_detectmodel_06_23_960_480_1180700.pb 1
hiai_dress_detect.pb 1;1,960,960,3
hiai_face_model_npu.pb 1
hiai_frozen_inference_graph.pb 1;1,300,300,3
hiai_ghostnet.pb 1
hiai_iMaxDN_RGB.pb 1
hiai_iMaxSR_RGB.pb 1
hiai_label_and_video.pb 1;1,224,224,3
hiai_latin_ocr.pb 1
hiai_latin_ocr_1.pb 1
hiai_lm_inference_graph.pb 1
hiai_model_0909_kd_rot_ps_softmax.pb 1;1,224,224,3
hiai_PoseEstimation_Pcm.pb 1
model_normalize_object_scene_ps_20200519.pb 1;1,224,224,3
mtk_AADB_HADB_MBV2_model.pb 1;1,224,224,3
mtk_AADB_HADB_MBV3_model.pb 1;1,224,224,3
mtk_age_gender.pb 1
mtk_model_ckpt.pb 1
mtk_model_face_dress.pb 1;1,128,128,3
mtk_model_normalize_object_scene_ps_20200519.pb 1;1,224,224,3
ml_ocr_latin.pb 1
ml_noya_tts_melgan.pb 1;16,16,80
ml_video_edit_oneclick_adaptis.pb 3
unet_model_reconstruct.pb;1,256,256,3
ml_video_edit_generate_filter.pb
densenet.pb;1,224,224,3
inception_resnet_v2.pb;1,299,299,3
inception_v3.pb;1,299,299,3
inception_v4.pb;1,299,299,3
mnasnet_1.0_224.pb
mnasnet_1.3_224.pb
mobilenet_v1_0.25_128_frozen.pb;1,128,128,3
mobilenet_v2_1.0_224_frozen.pb;1,224,224,3
nasnet_large.pb;1,331,331,3
nasnet_mobile.pb;1,224,224,3
squeezenet.pb;1,224,224,3
ml_ei_headpose.pb;1,64,64,3
ml_ei_landmark.pb;1,160,160,3
ml_face_openclose.pb;1,32,32,3
ml_object_detect.pb;1,288,288,3
ml_ocr_jk.pb
ml_video_edit_enhance.pb
ml_vision_guide_detection1.pb
ml_vision_guide_detection3.pb
scan_hms_angle.pb
scan_hms_detect.pb
hiai_AADB_HADB_MBV2_model.pb;1,224,224,3
hiai_cn_recognize_modify_padv2.pb;1,32,512,1
hiai_cpu_face_emotion.pb
hiai_cpu_face_gazing.pb
hiai_cpu_face_headpose.pb
hiai_ctpn_feature_map.pb
hiai_cv_focusShootOCRModel_02.pb
hiai_cv_focusShootOCRModel_08.pb
hiai_cv_poseEstimation.pb
hiai_detectmodel_06_23_960_480_1180700.pb
hiai_dress_detect.pb;1,960,960,3
hiai_face_model_npu.pb
hiai_frozen_inference_graph.pb;1,300,300,3
hiai_ghostnet.pb
hiai_iMaxDN_RGB.pb
hiai_iMaxSR_RGB.pb
hiai_label_and_video.pb;1,224,224,3
hiai_latin_ocr.pb
hiai_latin_ocr_1.pb
hiai_lm_inference_graph.pb
hiai_model_0909_kd_rot_ps_softmax.pb;1,224,224,3
hiai_PoseEstimation_Pcm.pb
model_normalize_object_scene_ps_20200519.pb;1,224,224,3
mtk_AADB_HADB_MBV2_model.pb;1,224,224,3
mtk_AADB_HADB_MBV3_model.pb;1,224,224,3
mtk_age_gender.pb
mtk_model_ckpt.pb
mtk_model_face_dress.pb;1,128,128,3
mtk_model_normalize_object_scene_ps_20200519.pb;1,224,224,3
ml_ocr_latin.pb
ml_noya_tts_melgan.pb;16,16,80
# Q_hand_0812.pb is not suitable for float16. Out of float16 range.
Q_hand_0812.pb
tacotron_encoder_stf.pb 5;1:1,62:1,62:1,62:1,62
Q_inception-249970-672-11-16.pb 1
Q_dila-small-mix-full-fineturn-390000-nopixel-nosigmoid.pb 1
Q_inception-249970-672-11-16.pb
Q_dila-small-mix-full-fineturn-390000-nopixel-nosigmoid.pb
Q_crnn_screen_slim400w_more_20w.pb
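
The net effect of this hunk: the single-input TF config drops its input-count column, while multi-input models (such as decoder_step_201217.pb, encoder_0111.pb, female_model_step2_int16_noiseout.pb, and tacotron_encoder_stf.pb) migrate to the multiple-inputs config further down. Before and after for one entry:

# before: name, input count, then shapes after ';'
densenet.pb 1;1,224,224,3
# after: input count dropped; only 'name[;shapes]' remains
densenet.pb;1,224,224,3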

@@ -5,3 +5,7 @@ ml_ocr_jk.pb 0.5
ml_ocr_latin.pb 135
scan_hms_angle.pb 1.5
scan_hms_detect.pb 2.5
ml_face_openclose.pb;1,32,32,3 0.5
ml_object_detect.pb;1,288,288,3 2
Q_crnn_screen_slim400w_more_20w.pb 137
Q_inception-249970-672-11-16.pb 6.5

@@ -163,4 +163,5 @@ hiai_AADB_HADB_MBV2_model_f16.tflite 2.4
hiai_AADB_HADB_MBV2_model_fp32.tflite 4.1
mtk_age_gender_fp16.tflite 25.5
#hiai_cv_labelDetectorModel_v3.tflite 5.1
#hiai_detect_curve_model_float32.tflite 8.98
#hiai_detect_curve_model_float32.tflite 8.98
Q_language_model_hrmini_Q4_b4_17w.tflite 3.5

@@ -11,3 +11,16 @@ decoder.onnx;2;1,7,512:1,7
fasterrcnn_crop.pb;1;420,630,3
ml_video_edit_person_divison_video;2
hdc_tb_cn_neg.tflite;3
decoder_step_201217.pb;5
decoder_step_201217_modified.pb;5
encoder_201228.pb;3;1:1,22:1
encoder_0111_control_flow.pb;4;1:1,44:1:1
encoder_0111.pb;4;1:1,44:1:1
ml_video_edit_oneclick_adaptis.pb;3
tacotron_encoder_stf.pb;5;1:1,62:1,62:1,62:1,62
female_model_step2_int16_noiseout.pb;66
ml_female_model_step6_noiseout.pb;66
ml_male_model_step6_noiseout.pb;66
ml_tts_decoder_control_flow.pb;5
ml_tts_decoder.pb;5
ml_tts_encoder_control_flow.pb;4;1:1,22:1:1
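
These entries use 'model;input_num[;input_shapes]'. The run loops generate one input file per declared input, named model.ms_<i>.bin, and pass the comma-joined list to --inDataFile. A hedged sketch of that parsing (the ';'-field awk split here is my illustration; the scripts in this diff are mid-refactor between two parsing styles):

# Sketch: parse a multiple-inputs entry such as 'encoder_0111.pb;4;1:1,44:1:1'.
line='encoder_0111.pb;4;1:1,44:1:1'
model_name_and_input_num=${line%;*}                                     # 'encoder_0111.pb;4'
length=${#model_name_and_input_num}
input_shapes=${line:length+1}                                           # '1:1,44:1:1'
model_name=`echo ${model_name_and_input_num}|awk -F ';' '{print $1}'`   # 'encoder_0111.pb'
input_num=`echo ${model_name_and_input_num}|awk -F ';' '{print $2}'`    # '4'
input_files=''
for i in $(seq 1 $input_num)
do
    input_files=$input_files$model_name'.ms_'$i'.bin,'                  # one bin file per input
done
echo ${input_files}   # 'encoder_0111.pb.ms_1.bin,...,encoder_0111.pb.ms_4.bin,'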

@@ -9,4 +9,8 @@ decoder_step_201217.pb;5 187
ml_video_edit_art_transfer.onnx;3 3
decoder.onnx;2;1,7,512:1,7 113
decoder_step_201217_modified.pb;5 0.5
encoder_0111_control_flow.pb;4;1:1,44:1:1 10
#encoder_0111_control_flow.pb;4;1:1,44:1:1 10
ml_female_model_step6_noiseout.pb;66 2
ml_male_model_step6_noiseout.pb;66 2.5
ml_tts_decoder_control_flow.pb;5 1
ml_tts_decoder.pb;5 117

@@ -15,12 +15,10 @@ function Run_Converter() {

# Convert tf models:
while read line; do
tf_line_info=${line}
if [[ $tf_line_info == \#* ]]; then
model_name=${line%;*}
if [[ $model_name == \#* ]]; then
continue
fi
model_name=`echo ${tf_line_info}|awk -F ' ' '{print $1}'`
input_num=`echo ${tf_line_info}|awk -F ' ' '{print $2}'`
echo ${model_name} >> "${run_converter_log_file}"
echo './converter_lite --fmk=TF --modelFile='${models_path}'/'${model_name}' --outputFile='${ms_models_path}'/'${model_name}'' >> "${run_converter_log_file}"
./converter_lite --fmk=TF --modelFile=$models_path/${model_name} --outputFile=${ms_models_path}/${model_name}
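
Assembling only the lines this hunk keeps, the simplified conversion loop reads roughly as follows (the closing done line and its config variable sit outside the hunk, so the name below is a placeholder):

# Convert tf models:
while read line; do
    model_name=${line%;*}                  # entry is now 'name[;shapes]'; keep the name
    if [[ $model_name == \#* ]]; then
        continue                           # skip commented-out models
    fi
    echo ${model_name} >> "${run_converter_log_file}"
    ./converter_lite --fmk=TF --modelFile=${models_path}/${model_name} --outputFile=${ms_models_path}/${model_name}
done < ${models_tf_config}                 # placeholder: not visible in this hunk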

@@ -398,31 +396,15 @@ function Run_x86() {

# Run tf converted models:
while read line; do
model_name_and_input_num=${line%;*}
length=${#model_name_and_input_num}
model_name=${line%;*}
length=${#model_name}
input_shapes=${line:length+1}
tf_line_info=${model_name_and_input_num}
if [[ $tf_line_info == \#* ]]; then
if [[ $model_name == \#* ]]; then
continue
fi
model_name=`echo ${tf_line_info}|awk -F ' ' '{print $1}'`
input_num=`echo ${tf_line_info}|awk -F ' ' '{print $2}'`
input_files=''
for i in $(seq 1 $input_num)
do
input_files=$input_files'/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'$model_name'.ms_'$i'.bin,'
done
echo ${model_name} >> "${run_x86_log_file}"
echo './benchmark --modelFile='${ms_models_path}'/'${model_name}'.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${input_files}' --inputShapes='${input_shapes}' --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out' >> "${run_x86_log_file}"
./benchmark --modelFile=${ms_models_path}/${model_name}.ms --inDataFile=${input_files} --inputShapes=${input_shapes} --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out >> "${run_x86_log_file}"
if [ $? = 0 ]; then
run_result='x86: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
else
run_result='x86: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; return 1
fi
# run benchmark test with input data
echo './benchmark --modelFile='${ms_models_path}'/'${model_name}'.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${input_files}' --inputShapes='${input_shapes} >> "${run_x86_log_file}"
./benchmark --modelFile=${ms_models_path}/${model_name}.ms --inDataFile=${input_files} --inputShapes=${input_shapes} >> "${run_x86_log_file}"
echo './benchmark --modelFile='${ms_models_path}'/'${model_name}'.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --inputShapes='${input_shapes}' --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out' >> "${run_x86_log_file}"
./benchmark --modelFile=${ms_models_path}/${model_name}.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --inputShapes=${input_shapes} --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out >> "${run_x86_log_file}"
if [ $? = 0 ]; then
run_result='x86: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
else
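
Reading the hunk with the multi-input handling removed (those models now live in their own config), the x86 loop reduces to the single-input path: parse 'name[;shapes]', run an accuracy check against the stored reference output, then report pass or fail. A consolidated sketch under that assumption:

# Run tf converted models (single-input path):
while read line; do
    model_name=${line%;*}
    length=${#model_name}
    input_shapes=${line:length+1}
    if [[ $model_name == \#* ]]; then
        continue
    fi
    echo ${model_name} >> "${run_x86_log_file}"
    # accuracy run: feed the recorded input and compare against the recorded output
    ./benchmark --modelFile=${ms_models_path}/${model_name}.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --inputShapes=${input_shapes} --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out >> "${run_x86_log_file}"
    if [ $? = 0 ]; then
        run_result='x86: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
    else
        run_result='x86: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; return 1
    fi
done < ${models_tf_config}    # placeholder: the config variable is outside the hunk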

@@ -1296,34 +1278,27 @@ function Run_arm64() {

# Run tf converted models:
while read line; do
model_name_and_input_num=${line%;*}
length=${#model_name_and_input_num}
model_name=${line%;*}
length=${#model_name}
input_shapes=${line:length+1}
tf_line_info=${model_name_and_input_num}
if [[ $tf_line_info == \#* ]]; then
if [[ $model_name == \#* ]]; then
continue
fi
model_name=`echo ${tf_line_info}|awk -F ' ' '{print $1}'`
input_num=`echo ${tf_line_info}|awk -F ' ' '{print $2}'`
input_files=''
for i in $(seq 1 $input_num)
do
input_files=$input_files'/data/local/tmp/input_output/input/'$model_name'.ms_'$i'.bin,'
done
echo ${model_name} >> "${run_arm64_fp32_log_file}"
echo 'cd /data/local/tmp/benchmark_test' > adb_run_cmd.txt
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --inputShapes='${input_shapes}' --modelFile='${model_name}'.ms --inDataFile='${input_files}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out' >> "${run_arm64_fp32_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --inputShapes='${input_shapes}' --modelFile='${model_name}'.ms --inDataFile='${input_files}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out' >> adb_run_cmd.txt
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inDataFile=/data/local/tmp/input_output/input/'${model_name}'.ms.bin --inputShapes='${input_shapes}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out' >> "${run_arm64_fp32_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inDataFile=/data/local/tmp/input_output/input/'${model_name}'.ms.bin --inputShapes='${input_shapes}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out' >> adb_run_cmd.txt
adb -s ${device_id} shell < adb_run_cmd.txt >> "${run_arm64_fp32_log_file}"
if [ $? = 0 ]; then
run_result='arm64: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
else
run_result='arm64: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; return 1
fi
# run benchmark test with input data
# run benchmark test without calib data
echo ${model_name} >> "${run_arm64_fp32_log_file}"
echo 'cd /data/local/tmp/benchmark_test' > adb_run_cmd.txt
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --inputShapes='${input_shapes}' --modelFile='${model_name}'.ms --inDataFile='${input_files}' --warmUpLoopCount=1 --loopCount=2' >> "${run_arm64_fp32_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --inputShapes='${input_shapes}' --modelFile='${model_name}'.ms --inDataFile='${input_files}' --warmUpLoopCount=1 --loopCount=2' >> adb_run_cmd.txt
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inputShapes='${input_shapes}' --warmUpLoopCount=1 --loopCount=2' >> "${run_arm64_fp32_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inputShapes='${input_shapes}' --warmUpLoopCount=1 --loopCount=2' >> adb_run_cmd.txt
adb -s ${device_id} shell < adb_run_cmd.txt >> "${run_arm64_fp32_log_file}"
if [ $? = 0 ]; then
run_result='arm64: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
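
Every on-device case in this function follows the same staging pattern: the exact benchmark command is echoed once into the log for traceability and once into adb_run_cmd.txt, which is then piped to adb shell. Stripped to its skeleton:

# Sketch: stage device commands in a file, then replay them over adb.
echo 'cd /data/local/tmp/benchmark_test' > adb_run_cmd.txt
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inDataFile='${input_files}' --inputShapes='${input_shapes}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out' >> adb_run_cmd.txt
adb -s ${device_id} shell < adb_run_cmd.txt >> "${run_arm64_fp32_log_file}"
if [ $? = 0 ]; then
    run_result='arm64: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
else
    run_result='arm64: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; return 1
fi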

@@ -1627,6 +1602,17 @@
else
run_result='arm64: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; return 1
fi
# run benchmark test without calib data
echo ${model_name} >> "${run_arm64_fp32_log_file}"
echo 'cd /data/local/tmp/benchmark_test' > adb_run_cmd.txt
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inDataFile='${input_files}' --inputShapes='${input_shapes}' --warmUpLoopCount=1 --loopCount=2' >> "${run_arm64_fp32_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inDataFile='${input_files}' --inputShapes='${input_shapes}' --warmUpLoopCount=1 --loopCount=2' >> adb_run_cmd.txt
adb -s ${device_id} shell < adb_run_cmd.txt >> "${run_arm64_fp32_log_file}"
if [ $? = 0 ]; then
run_result='arm64: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
else
run_result='arm64: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; return 1
fi
done < ${models_with_multiple_inputs_config}

# Run converted models whose accuracy does not need to be checked:

@@ -1845,7 +1831,7 @@ function Run_arm64_fp16() {
while read line; do
fp16_line_info=${line}
if [[ $fp16_line_info == \#* ]]; then
continue-
continue
fi
model_info=`echo ${fp16_line_info}|awk -F ' ' '{print $1}'`
accuracy_limit=`echo ${fp16_line_info}|awk -F ' ' '{print $2}'`
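
The hunk cuts off right after these two extractions. For context, a hedged sketch of how an fp16 guard iteration plausibly continues (the --enableFp16 and --accuracyThreshold options are assumptions about the benchmark tool's usage, not shown in this diff):

# assumed continuation of the fp16 loop:
model_name=${model_info%%;*}   # drop any ';input_shapes' suffix from the info field
# run in fp16 mode and gate on the per-model accuracy limit (flags assumed)
./benchmark --modelFile=${ms_models_path}/${model_name}.ms --enableFp16=true --accuracyThreshold=${accuracy_limit}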