!18917 [MS][LITE] add nnie ci scripts

Merge pull request !18917 from jianghui58/nnie_ci
This commit is contained in:
i-robot 2021-07-01 01:13:58 +00:00 committed by Gitee
commit 71f185ae1e
4 changed files with 324 additions and 0 deletions

View File

@ -0,0 +1,36 @@
classification/lenet lenet_deploy 1 1 0 0.5
classification/alexnet bvlc_alexnet_no_group_deploy_mark 1 1 0 0.5
classification/vgg16 vgg16_upgrade_deploy;1,224,224,3 1 1 0 0.5
classification/mobilenet mobilenet_v2_deploy_inplace_depthconv 1 1 0 0.5
detection/fasterRcnn/double_roi nnie_double_roi 1 1 300 0.5
detection/mtcnn det3 1 1 0 0.5
detection/rfcn/resnet50 rfcn_resnet50_nnie 1 1 100 0.5
detection/ssd nnie_ssd_deploy 1 1 0 0.5
detection/yolov1 yolo_small_deploy 1 1 0 0.5
lstm/lstm_fc lstm_fc_0;2,135,1,1:20,135,1,1 2 10 0 0.5
segmentation/segnet segnet_basic_batchnorm_scale_inference 1 1 0 0.5
tracker/goturn goturn_tracker_no_group_mark 1 1 0 0.5
ci_models detect-deeper-halfdeeper-mbv1-shortcut-400-400_nopostprocess_simplified 1 1 0 0.5
ci_models gender_res_large_deploy 1 1 0 0.5
ci_models hdc_age_medium 1 1 0 0.5
ci_models hdc_isface 1 1 0 0.5
ci_models hiai_cpu_face_detect 1 1 0 0.5
ci_models hiai_cv_aestheticsEngineModel_osp 1 1 0 0.5
ci_models hiai_cv_focusShootOCRModel_01 1 1 0 0.5
ci_models hiai_face_RFB-Epoch-170-no-transpose 1 1 0 0.5
ci_models landmark 1 1 0 0.5
ci_models ml_2012_ocr_detection_caffe_tmp 1 1 0 0.5
ci_models ml_hand_3d_detection 1 1 0 0.5
ci_models ml_hand_3d_regression 1 1 0 0.5
ci_models ml_ocr_detect_20200305 1 1 0 0.5
ci_models mtk_detect-deeper-halfdeeper-mbv1-lastearlySSD-shortcut-400-400_nopostprocess_simplified 1 1 0 0.5
ci_models mtk_detect-mbv1-shortcut-400-400_nopostprocess_simplified 1 1 0 0.5
ci_models mtk_detect_mbv1_640_480_nopostprocess_simplified 1 1 0 0.5
ci_models ml_Heatmap_depth_180240 2 1 0 0.5
ci_models ml_video_edit_person_divison_video 2 1 0 0.5
ci_models bolt_deploy_color-server 1 1 0 1.9
ci_models hiai_video_seg 1 1 0 3.0
ci_models ml_video_edit_have_imageProcessLayer_interpTo145_20201015 1 1 0 2.8
ci_models ml_ocr_identify_card_fcny 1 1 0 0.5
ci_models ml_2012_ocr_rec_caffe 1 1 0 0.5
ci_models ml_tabel_recog 1 1 0 0.5

View File

@ -114,3 +114,12 @@ if [[ $backend == "all" || $backend == "x86-all" || $backend == "x86" || $backen
exit 1
fi
fi
# NNIE backend (Hi3516D board): delegate to the dedicated NNIE converter/benchmark driver.
if [[ $backend == "all" || $backend == "arm32_3516D" ]]; then
sh $cur_path/scripts/nnie/run_converter_nnie.sh -r $release_path -m $models_path -d $device_id -e $backend
hi3516_status=$?
# Fail the whole CI run if the NNIE pipeline reported any error.
if [[ $hi3516_status -ne 0 ]]; then
echo "Run nnie hi3516 failed"
exit 1
fi
fi

View File

@ -0,0 +1,84 @@
#!/bin/bash
# Run on hi3516 platform:
#######################################
# Run every NNIE-converted model listed in ${models_nnie_config} through the
# on-device ./benchmark binary and record per-model pass/fail lines.
# Globals (read): basepath, models_nnie_config, run_hi3516_log_file,
#                 run_benchmark_result_file
# Returns: 0 when all models pass; 1 on the first benchmark failure.
#######################################
function Run_Hi3516() {
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${basepath}:/usr/lib:/lib
# Run nnie converted models:
# Config columns (see models_nnie.cfg):
#   $1 dir  $2 model[;input_shapes]  $3 input_num  $4 time_step  $5 max_roi_num  $6 accuracy_limit
# NOTE(review): ./benchmark inherits this loop's stdin (the config file); if it
# ever reads stdin it will swallow model lines — consider appending </dev/null.
while read line; do
model_pass=${line:0:1}
# Skip comment lines starting with '#'.
if [[ $model_pass == \# ]]; then
continue
fi
nnie_line_info=${line}
model_info=`echo ${nnie_line_info}|awk -F ' ' '{print $2}'`
input_num=`echo ${nnie_line_info}|awk -F ' ' '{print $3}'`
env_time_step=`echo ${nnie_line_info}|awk -F ' ' '{print $4}'`
env_max_roi_num=`echo ${nnie_line_info}|awk -F ' ' '{print $5}'`
accuracy_limit=`echo ${nnie_line_info}|awk -F ' ' '{print $6}'`
# model_info is "<name>[;shape1[:shape2...]]": name before the first ';',
# optional input shapes after it.
model_name=${model_info%%;*}
length=`expr ${#model_name} + 1`
input_shapes=${model_info:${length}}
input_files=''
# Multi-input models use numbered .ms.bin_<i> files joined by commas;
# single-input models use one .ms.bin file.
# NOTE(review): string comparison — a config value of "01" would not equal "1".
if [[ $input_num != 1 ]]; then
for i in $(seq 1 $input_num)
do
input_files=$input_files${basepath}'/../input_output/input/'${model_name}'.ms.bin_'$i','
done
else
input_files=${basepath}/../input_output/input/${model_name}.ms.bin
fi
# The NNIE runtime reads these two knobs from the environment per model.
export TIME_STEP=${env_time_step}
export MAX_ROI_NUM=${env_max_roi_num}
# Log the exact command line, then execute it.
echo './benchmark --modelFile='${basepath}'/'${model_name}'.ms --inDataFile='${input_files}' --inputShapes='${input_shapes}' --benchmarkDataFile='${basepath}'/../input_output/output/'${model_name}'.ms.out --accuracyThreshold='${accuracy_limit} >> "${run_hi3516_log_file}"
./benchmark --modelFile=${basepath}/${model_name}.ms --inDataFile=${input_files} --inputShapes=${input_shapes} --benchmarkDataFile=${basepath}/../input_output/output/${model_name}.ms.out --accuracyThreshold=${accuracy_limit} >> "${run_hi3516_log_file}"
if [ $? = 0 ]; then
run_result='hi3516: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
else
# First failure aborts the whole run.
run_result='hi3516: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; return 1
fi
done < ${models_nnie_config}
}
# Print start msg after run testcase
# Emit the horizontal rule that frames testcase report sections.
function MS_PRINT_TESTCASE_END_MSG() {
  local separator="-----------------------------------------------------------------------------------------------------------------------------------"
  printf '%s\n' "${separator}"
}
# ---- main ----------------------------------------------------------------
# Runs the NNIE benchmarks on the Hi3516 board and prints a framed summary.
basepath=$(pwd)
# Fix: log message said "bashpath"; the variable is basepath.
echo "on hi3516, basepath is ${basepath}"
# Set models config filepath
models_nnie_config=${basepath}/models_nnie.cfg
echo ${models_nnie_config}
# Write benchmark result to temp file
run_benchmark_result_file=${basepath}/run_benchmark_result.txt
echo ' ' > ${run_benchmark_result_file}
run_hi3516_log_file=${basepath}/run_hi3516_log.txt
echo 'run hi3516 logs: ' > ${run_hi3516_log_file}
echo "Running in hi3516 ..."
# Launch in the background and wait on the PID so $? reflects the function's
# exit status (same background+wait pattern as run_converter_nnie.sh).
Run_Hi3516 &
Run_hi3516_PID=$!
sleep 1
wait ${Run_hi3516_PID}
Run_benchmark_status=$?
# Check benchmark result and return value
if [[ ${Run_benchmark_status} = 0 ]];then
echo "Run benchmark success"
MS_PRINT_TESTCASE_END_MSG
cat ${run_benchmark_result_file}
MS_PRINT_TESTCASE_END_MSG
exit 0
else
echo "Run benchmark failed"
MS_PRINT_TESTCASE_END_MSG
cat ${run_benchmark_result_file}
MS_PRINT_TESTCASE_END_MSG
exit 1
fi

View File

@ -0,0 +1,195 @@
#!/bin/bash
# Run converter for NNIE models on x86 platform:
#######################################
# Unpack the x86 release package, configure the NNIE converter plugin, and
# convert every Caffe model listed in ${models_nnie_config} to .ms format.
# Globals (read): x86_path, version, models_path, models_nnie_config,
#                 ms_models_path, run_converter_log_file, run_converter_result_file
# Returns: 0 when all conversions pass; 1 on the first failure.
#######################################
function Run_Converter() {
# Unzip x86 runtime and converter
cd ${x86_path} || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-linux-x64/ || exit 1
cp tools/converter/converter/converter_lite ./ || exit 1
# The NNIE converter plugin needs the bundled opencv/protobuf plus mapper libs.
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./tools/converter/lib/:./tools/converter/third_party/glog/lib:./tools/converter/providers/Hi3516D/third_party/opencv-4.2.0:./tools/converter/providers/Hi3516D/third_party/protobuf-3.9.0
export NNIE_MAPPER_PATH=./tools/converter/providers/Hi3516D/libnnie_mapper.so
export NNIE_DATA_PROCESS_PATH=./tools/converter/providers/Hi3516D/libmslite_nnie_data_process.so
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./runtime/lib/
export BENCHMARK_PATH=${x86_path}/mindspore-lite-${version}-linux-x64/tools/benchmark/benchmark
export NNIE_MODEL_NAME=model.ms
# generate converter_lite config file
ms_config_file=${x86_path}/converter_for_nnie.cfg
echo 'plugin_path='${x86_path}'/mindspore-lite-'${version}'-linux-x64/tools/converter/providers/Hi3516D/libmslite_nnie_converter.so' > ${ms_config_file}
echo -e 'disable_fusion=off\n' >> ${ms_config_file}
echo ' ' > ${run_converter_log_file}
rm -rf ${ms_models_path}
mkdir -p ${ms_models_path}
# Convert nnie models:
# Config columns: $1 model directory, $2 model name (optionally ";shapes").
while read line; do
nnie_line_info=${line}
# Skip comment lines starting with '#'.
if [[ $nnie_line_info == \#* ]]; then
continue
fi
model_location=`echo ${nnie_line_info}|awk -F ' ' '{print $1}'`
model_info=`echo ${nnie_line_info}|awk -F ' ' '{print $2}'`
model_name=${model_info%%;*}
# Each model ships a per-model NNIE mapper config alongside it.
echo 'export NNIE_CONFIG_PATH='${models_path}'/'${model_location}'/'${model_name}'.cfg' >> "${run_converter_log_file}"
export NNIE_CONFIG_PATH=${models_path}/${model_location}/${model_name}.cfg
echo ${model_name} >> "${run_converter_log_file}"
# Log the exact command line, then execute it.
echo './converter_lite --fmk=CAFFE --modelFile='${models_path}'/'${model_location}'/model/'${model_name}'.prototxt --weightFile='${models_path}'/'${model_location}'/model/'${model_name}'.caffemodel --configFile='${ms_config_file}' --outputFile='${ms_models_path}'/'${model_name}'' >> "${run_converter_log_file}"
./converter_lite --fmk=CAFFE --modelFile=${models_path}/${model_location}/model/${model_name}.prototxt --weightFile=${models_path}/${model_location}/model/${model_name}.caffemodel --configFile=${ms_config_file} --outputFile=${ms_models_path}/${model_name}
if [ $? = 0 ]; then
converter_result='converter CAFFE '${model_name}' pass';echo ${converter_result} >> ${run_converter_result_file}
else
# First failed conversion aborts the whole run.
converter_result='converter CAFFE '${model_name}' failed';echo ${converter_result} >> ${run_converter_result_file};return 1
fi
done < ${models_nnie_config}
}
#######################################
# Unpack the aarch32 release, copy benchmark artifacts to the shared folder,
# push them to the Hi3516 board over scp, and run run_benchmark_nnie.sh there.
# Globals (read): arm32_path, version, benchmark_test_path, device_ip,
#                 run_benchmark_result_file
# Exits: non-zero on any copy/push failure or a failed remote benchmark.
#######################################
function Run_Hi3516() {
cd ${arm32_path} || exit 1
tar -zxf mindspore-lite-${version}-linux-aarch32.tar.gz || exit 1
cd ${arm32_path}/mindspore-lite-${version}-linux-aarch32 || return 1
# copy related files to benchmark_test
cp -a ./providers/Hi3516D/libmslite_nnie.so ${benchmark_test_path}/libmslite_nnie.so || exit 1
cp -a ./providers/Hi3516D/libnnie_proposal.so ${benchmark_test_path}/libnnie_proposal.so || exit 1
cp -a ./providers/Hi3516D/benchmark ${benchmark_test_path}/benchmark || exit 1
cp -a ./runtime/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
# cp files to nfs shared folder
echo "start push files to hi3516"
echo ${device_ip}
sshpass -p "mindspore@123" scp ${benchmark_test_path}/* root@${device_ip}:/user/nnie/benchmark_test/ || exit 1
sshpass -p "mindspore@123" ssh root@${device_ip} "cd /user/nnie/benchmark_test; sh run_benchmark_nnie.sh"
if [ $? = 0 ]; then
# NOTE(review): ${model_name} is only assigned inside Run_Converter, which runs
# in a background subshell — it is almost certainly empty here, so this logs
# "hi3516:  pass". Confirm whether a per-model name was intended.
run_result='hi3516: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file};
else
run_result='hi3516: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; exit 1
fi
}
basepath=$(pwd)
echo ${basepath}
#set -e
# Parse command-line flags.
# Example: sh run_nnie_nets.sh -r /home/temp_test -m /home/temp_test/models -e arm32_3516D -d 192.168.1.1
while getopts "r:m:d:e:" opt; do
case ${opt} in
r)
# Directory holding the release tarballs (ubuntu_x86/, linux_aarch32/).
release_path=${OPTARG}
echo "release_path is ${OPTARG}"
;;
m)
# Root directory of the NNIE test models.
models_path=${OPTARG}
echo "models_path is ${OPTARG}"
;;
d)
# IP address of the Hi3516 board.
device_ip=${OPTARG}
echo "device_ip is ${OPTARG}"
;;
e)
# Backend selector: "all" or "arm32_3516D".
backend=${OPTARG}
echo "backend is ${OPTARG}"
;;
?)
echo "unknown para"
exit 1;;
esac
done
# Print a separator rule used to frame report sections.
function MS_PRINT_TESTCASE_END_MSG() {
echo -e "-----------------------------------------------------------------------------------------------------------------------------------"
}
#######################################
# Pretty-print each converter result line ("converter CAFFE <model> <status>")
# as four aligned columns, framed by separator rules.
# Globals (read): run_converter_result_file
#######################################
function Print_Converter_Result() {
MS_PRINT_TESTCASE_END_MSG
while read line; do
# Split the line into its whitespace-separated fields.
# Fix: the original `arr=("${line}")` created a ONE-element array and only
# produced columns via accidental word-splitting of the unquoted ${arr[0]}.
read -r -a arr <<< "${line}"
printf "%-15s %-20s %-90s %-7s\n" "${arr[0]}" "${arr[1]}" "${arr[2]}" "${arr[3]}"
done < ${run_converter_result_file}
MS_PRINT_TESTCASE_END_MSG
}
# Release package locations for the two target platforms.
x86_path=${release_path}/ubuntu_x86
arm32_path=${release_path}/linux_aarch32
# Derive the version string from the tarball name
# (mindspore-lite-<version>-linux-x64.tar.gz -> third '-'-separated token).
file_name=$(ls ${x86_path}/*linux-x64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
# Set models config filepath
models_nnie_config=${basepath}/../config/models_nnie.cfg
run_hi3516_script=${basepath}/scripts/nnie/run_benchmark_nnie.sh
# Set ms models output path
ms_models_path=${basepath}/ms_models
# Write converter result to temp file
run_converter_log_file=${basepath}/run_converter_log.txt
echo ' ' > ${run_converter_log_file}
run_converter_result_file=${basepath}/run_converter_result.txt
echo ' ' > ${run_converter_result_file}
# Run converter
echo "start Run converter for NNIE models..."
# Run in the background and wait on the PID so $? reflects the function's status.
Run_Converter &
Run_converter_PID=$!
sleep 1
wait ${Run_converter_PID}
Run_converter_status=$?
if [[ ${Run_converter_status} = 0 ]];then
echo "Run converter for NNIE models success"
Print_Converter_Result
else
echo "Run converter for NNIE models failed"
cat ${run_converter_log_file}
Print_Converter_Result
exit 1
fi
# Write benchmark result to temp file
run_benchmark_result_file=${basepath}/run_benchmark_result.txt
echo ' ' > ${run_benchmark_result_file}
# Copy the MindSpore models:
benchmark_test_path=${basepath}/benchmark_test
rm -rf ${benchmark_test_path}
mkdir -p ${benchmark_test_path}
cp -a ${ms_models_path}/*.ms ${benchmark_test_path} || exit 1
cp -a ${models_nnie_config} ${benchmark_test_path} || exit 1
cp -a ${run_hi3516_script} ${benchmark_test_path} || exit 1
if [[ $backend == "all" || $backend == "arm32_3516D" ]]; then
# Run on hi3516
# Re-derive the version from the aarch32 tarball name (may differ from x64).
file_name=$(ls ${arm32_path}/*linux-aarch32.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
Run_Hi3516 &
Run_hi3516_PID=$!
sleep 1
fi
if [[ $backend == "all" || $backend == "arm32_3516D" ]]; then
wait ${Run_hi3516_PID}
Run_hi3516_status=$?
# Check benchmark result and return value
if [[ ${Run_hi3516_status} != 0 ]];then
echo "Run_hi3516 failed"
isFailed=1
else
echo "Run_hi3516 success"
isFailed=0
fi
fi
# NOTE(review): if $backend matches neither branch, isFailed stays unset and the
# script exits 0 without running anything on the board — confirm this is intended.
if [[ $isFailed == 1 ]]; then
exit 1
fi
exit 0