!27971 [MS][LITE] add dpico 3403 simulation ci

Merge pull request !27971 from jianghui58/dpico_dev_compile
i-robot 2021-12-23 03:36:45 +00:00 committed by Gitee
commit e2971c6826
5 changed files with 80 additions and 27 deletions

View File

@@ -18,6 +18,27 @@
 #include <math.h>
 #include "nnacl/infer/infer_register.h"

+int ComputePadList(PoolingParameter *param, int input_h, int input_w, int output_h, int output_w) {
+  if (param == NULL) {
+    return NNACL_NULL_PTR;
+  }
+  int pad_h_all = ((output_h - 1) * param->stride_h_ + (param->window_h_ - 1) + 1 - input_h);
+  int pad_w_all = ((output_w - 1) * param->stride_w_ + (param->window_w_ - 1) + 1 - input_w);
+  if (pad_h_all < 0) {
+    param->pad_u_ = param->pad_d_ = 0;
+  } else {
+    param->pad_u_ = pad_h_all / 2;
+    param->pad_d_ = pad_h_all - param->pad_u_;
+  }
+  if (pad_w_all < 0) {
+    param->pad_l_ = param->pad_r_ = 0;
+  } else {
+    param->pad_l_ = pad_w_all / 2;
+    param->pad_r_ = pad_w_all - param->pad_l_;
+  }
+  return NNACL_OK;
+}
+
 int PoolingInferShape(const TensorC *const *inputs, size_t inputs_size, TensorC **outputs, size_t outputs_size,
                       OpParameter *parameter) {
   int check_ret = CheckAugmentWithMinSize(inputs, inputs_size, outputs, outputs_size, parameter, 1, 1);
@@ -29,8 +50,10 @@ int PoolingInferShape(const TensorC *const *inputs, size_t inputs_size, TensorC
   if (input->format_ != Format_NHWC) {
     return NNACL_FORMAT_ERROR;
   }
-  TensorC *output = outputs[0];
-  SetDataTypeFormat(output, input);
+  for (size_t i = 0; i < outputs_size; i++) {
+    TensorC *output = outputs[i];
+    SetDataTypeFormat(output, input);
+  }
   PoolingParameter *param = (PoolingParameter *)parameter;
   if (!InferFlag(inputs, inputs_size)) {
     return NNACL_INFER_INVALID;
@@ -52,22 +75,11 @@ int PoolingInferShape(const TensorC *const *inputs, size_t inputs_size, TensorC
   if (param->stride_h_ == 0 || param->stride_w_ == 0) {
     return NNACL_PARAM_INVALID;
   }
-  if (param->pad_mode_ == Pad_same) { // maybe error
+  if (param->pad_mode_ == Pad_same) {
     output_w = ceil((float)(input_w) / (float)(param->stride_w_));
     output_h = ceil((float)(input_h) / (float)(param->stride_h_));
-    int pad_h_all = ((output_h - 1) * param->stride_h_ + (window_h - 1) + 1 - input_h);
-    int pad_w_all = ((output_w - 1) * param->stride_w_ + (window_w - 1) + 1 - input_w);
-    if (pad_h_all < 0) {
-      param->pad_u_ = param->pad_d_ = 0;
-    } else {
-      param->pad_u_ = pad_h_all / 2;
-      param->pad_d_ = pad_h_all - param->pad_u_;
-    }
-    if (pad_w_all < 0) {
-      param->pad_l_ = param->pad_r_ = 0;
-    } else {
-      param->pad_l_ = pad_w_all / 2;
-      param->pad_r_ = pad_w_all - param->pad_l_;
+    if (ComputePadList(param, input_h, input_w, output_h, output_w) != NNACL_OK) {
+      return NNACL_NULL_PTR;
     }
   } else {
     int round_mode = (RoundMode)param->round_mode_;
@@ -86,7 +98,10 @@ int PoolingInferShape(const TensorC *const *inputs, size_t inputs_size, TensorC
   ShapeSet(input_shape, &input_shape_size, input->shape_, input->shape_size_);
   input_shape[1] = output_h > 0 ? output_h : 1;
   input_shape[2] = output_w > 0 ? output_w : 1;
-  SetShapeArray(output, input_shape, input_shape_size);
+  for (size_t i = 0; i < outputs_size; i++) {
+    TensorC *output = outputs[i];
+    SetShapeArray(output, input_shape, input_shape_size);
+  }
   return NNACL_OK;
 }
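As a quick sanity check of the padding arithmetic above, here is a minimal standalone C sketch using made-up sizes (224x224 input, 3x3 window, stride 2). It mirrors what ComputePadList does, but with plain local variables instead of the real PoolingParameter fields, so the values and variable names here are illustrative only.

#include <math.h>
#include <stdio.h>

/* Same-padding arithmetic as in ComputePadList, with made-up sizes. */
int main(void) {
  int input_h = 224, input_w = 224;
  int window_h = 3, window_w = 3;
  int stride_h = 2, stride_w = 2;

  /* Pad_same: each output dimension is ceil(input / stride). */
  int output_h = (int)ceil((float)input_h / (float)stride_h); /* 112 */
  int output_w = (int)ceil((float)input_w / (float)stride_w); /* 112 */

  /* Total padding so the last window still fits: (out - 1) * stride + window - in. */
  int pad_h_all = (output_h - 1) * stride_h + (window_h - 1) + 1 - input_h; /* 1 */
  int pad_w_all = (output_w - 1) * stride_w + (window_w - 1) + 1 - input_w; /* 1 */

  /* Split the total evenly, extra pixel goes to the bottom/right. */
  int pad_u = pad_h_all < 0 ? 0 : pad_h_all / 2;             /* 0 */
  int pad_d = pad_h_all < 0 ? 0 : pad_h_all - pad_h_all / 2; /* 1 */
  int pad_l = pad_w_all < 0 ? 0 : pad_w_all / 2;             /* 0 */
  int pad_r = pad_w_all < 0 ? 0 : pad_w_all - pad_w_all / 2; /* 1 */

  printf("output %dx%d, pads u/d/l/r = %d/%d/%d/%d\n", output_h, output_w, pad_u, pad_d, pad_l, pad_r);
  return 0;
}

With these numbers, a 224x224 input at stride 2 yields a 112x112 output with one extra row and column of padding at the bottom and right, which is what the helper stores in pad_u_/pad_d_/pad_l_/pad_r_.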

View File

@@ -0,0 +1,6 @@
+1_classification/resnet50_async_imagenet_classification resnet50 1
+3_segmentation/segnet segnet 1
+5_nlp/lstm lstm_modify 2
+5_nlp/gru/gru_cont_b1_p1_fmS_xi17_xsi21_lo27_fn1_fco0afo0_im0 gru_p1_modify 2
+5_nlp/rnn/rnn_cont_b1_p1_fmS_xi33_xsi36_lo44_fn1_fco0_im0 rnn_p1_modify 2
+6_tracker/goturn goturn_tracker_no_group_mark 2

View File

@@ -0,0 +1,15 @@
+onnx bloom_hongmo_detection_tmp.onnx 1
+onnx densenet-9.onnx 1
+onnx gts_version-RFB-320_simplified.onnx 1
+onnx hdc_Face_Emotion_MTI_Aesthetic.onnx;input:1,3,224,224 1
+onnx hdc_mobilenet_1w_class.onnx 1
+onnx inception-v2-9.onnx 1
+onnx ml_ei_facedetection.onnx;input:1,3,240,320 1
+onnx ml_table_segment.onnx 1
+onnx ml_video_edit_makeup_mobilenetv203.onnx 1
+onnx mtk_detect-deeper-halfdeeper-mbv1-shortcut-400-400_nopostprocess_simplified_onnx.onnx 1
+onnx mtk_detect_mbv1_640_480.onnx;input:1,3,480,640 1
+onnx mtk_detect-mbv2-shortcut-400-400.onnx;input:1,3,400,400 1
+onnx mtk_emotions-d2012-75.onnx 1
+onnx residual_distill_cifar10_bs_1.onnx;actual_input:1,3,32,32 1
+onnx squeezenet1.0-9.onnx 1

View File

@@ -115,6 +115,15 @@ if [[ $backend == "all" || $backend == "arm32_3516D" ]]; then
     fi
 fi

+if [[ $backend == "all" || $backend == "simulation_sd3403" ]]; then
+    sh $cur_path/scripts/dpico/run_simulation_3403.sh -r $release_path -m $models_path -e $backend
+    simulation_sd3403_status=$?
+    if [[ simulation_sd3403_status -ne 0 ]]; then
+        echo "Run dpico simulation_sd3403 failed."
+        exit 1
+    fi
+fi
+
 if [[ $backend == "all" || $backend == "arm64_cpu_cropping" ]]; then
     sh $cur_path/scripts/run_benchmark_cropping_size.sh -r $release_path -m $models_path -d $device_id -e $backend
     cpu_cropping_status=$?

View File

@@ -71,15 +71,15 @@ function Run_Convert_MODELS() {

 # Run converter for DPICO models on x86 platform:
 function Run_Converter() {
     cd ${x86_path} || exit 1
-    tar -zxf mindspore-enterprise-lite-${version}-linux-x64.tar.gz || exit 1
-    cd ${x86_path}/mindspore-enterprise-lite-${version}-linux-x64/ || exit 1
+    tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
+    cd ${x86_path}/mindspore-lite-${version}-linux-x64/ || exit 1
     # atc tool
-    cp tools/converter/providers/SD3403/third_party/pico_mapper/bin/atc ./ || exit 1
+    cp ${dpico_atc_path}/pico_mapper/bin/atc ./ || exit 1
     chmod +x atc
     cp tools/converter/converter/converter_lite ./ || exit 1
-    export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./tools/converter/lib/:./runtime/lib/:./tools/converter/providers/SD3403/third_party/pico_mapper/lib:./tools/converter/providers/SD3403/third_party/protobuf-3.9.0/lib:./tools/converter/providers/SD3403/third_party/opencv-4.2.0/lib
+    export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./tools/converter/lib/:./runtime/lib/:${dpico_atc_path}/pico_mapper/lib:${dpico_atc_path}/protobuf-3.9.0/lib:${dpico_atc_path}/opencv-4.2.0/lib
     chmod +x ./tools/benchmark/benchmark
     echo ' ' > ${run_converter_log_file}
@@ -162,7 +162,7 @@ function Run_Func_Sim() {

 # Run benchmark on 3403:
 function Run_Simulation() {
     cd ${om_generated_path} || exit 1
-    wget http://mindspore-repo.csi.rnd.huawei.com/mindspore/enterprise/dpico/func_sim || exit 1
+    cp ${dpico_atc_path}/simulation/func_sim ./ || exit 1
     chmod +x func_sim
     Run_Func_Sim ${models_onnx_3403_config}
@@ -188,7 +188,6 @@ function Run_Simulation() {

 basepath=$(pwd)
 echo ${basepath}
-# Example:sh run_dpico_nets.sh r /home/temp_test -m /home/temp_test/models -e arm32_3403D -d 192.168.1.1
 while getopts "r:m:e:" opt; do
     case ${opt} in
         r)
@@ -223,12 +222,21 @@ function Print_Converter_Result() {
     MS_PRINT_TESTCASE_END_MSG
 }

+# get sdk path
+if [ "${HISI_SDK_PATH}" ]; then
+    hisi_sdk=${HISI_SDK_PATH}
+else
+    echo "HISI_SDK_PATH env not found"
+    exit 1
+fi
+dpico_atc_path=${hisi_sdk}/sd3403_sdk/dpico_atc_adapter
+
 x86_path=${release_path}/ubuntu_x86

 # Set version
-file_name=$(ls ${x86_path}/*linux-x64.tar.gz)
+file_name=$(ls ${x86_path}/*-linux-x64.tar.gz)
 IFS="-" read -r -a file_name_array <<< "$file_name"
-version=${file_name_array[3]}
+version=${file_name_array[2]}

 # Set filepath
 models_caffe_3403_config=${basepath}/../config/models_caffe_3403_simulation.cfg
@@ -269,14 +277,14 @@ run_simulation_result_file=${basepath}/run_simulation_3403_result.txt
 rm ${run_simulation_result_file}
 echo ' ' > ${run_simulation_result_file}

-if [[ $backend == "all" || $backend == "simulation_3403" ]]; then
+if [[ $backend == "all" || $backend == "simulation_sd3403" ]]; then
     # Run funcsim
     Run_Simulation &
     Run_Simulation_PID=$!
     sleep 1
 fi
-if [[ $backend == "all" || $backend == "simulation_3403" ]]; then
+if [[ $backend == "all" || $backend == "simulation_sd3403" ]]; then
     wait ${Run_Simulation_PID}
     Run_Simulation_status=$?
     if [[ ${Run_Simulation_status} != 0 ]];then