!7377 [MSLITE] Add CI test for an ONNX model including LSTM operators.
Merge pull request !7377 from wangshaocong/ci_master
commit 80368dead3
@@ -329,10 +329,6 @@ void Conv2D::ConvInferShape(int input_h, int input_w, int *output_h, int *output
   int stride_h = GetStrideH();
   int dilate_w = GetDilateW();
   int dilate_h = GetDilateH();
-  pad_l_ = GetPadLeft();
-  pad_u_ = GetPadUp();
-  pad_d_ = GetPadDown();
-  pad_r_ = GetPadRight();

   if (GetPadMode() == schema::PadMode_SAME_UPPER) {
     *output_w = std::ceil(static_cast<float>(input_w) / static_cast<float>(stride_w));
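For reference, the SAME_UPPER branch computes the spatial output purely from input size and stride, output = ceil(input / stride); for example an input width of 32 with stride 2 gives ceil(32 / 2) = 16 regardless of kernel size, dilation, or the pad values (the numbers here are illustrative, not taken from any particular model).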
@@ -378,6 +374,11 @@ int Conv2D::InferShape(std::vector<Tensor *> inputs_, std::vector<Tensor *> outp

   out_tensor->SetFormat(input_tensor->GetFormat());
   out_tensor->set_data_type(input_tensor->data_type());
+  pad_l_ = GetPadLeft();
+  pad_u_ = GetPadUp();
+  pad_d_ = GetPadDown();
+  pad_r_ = GetPadRight();
+
   if (!GetInferFlag()) {
     return RET_OK;
   }
@@ -210,6 +210,11 @@ int DepthwiseConv2D::InferShape(std::vector<lite::Tensor *> inputs_, std::vector
   MS_ASSERT(output != nullptr);
   output->SetFormat(input->GetFormat());
   output->set_data_type(input->data_type());
+  pad_l_ = GetPadLeft();
+  pad_u_ = GetPadUp();
+  pad_d_ = GetPadDown();
+  pad_r_ = GetPadRight();
+
   if (!GetInferFlag()) {
     return RET_OK;
   }
@@ -218,12 +223,8 @@ int DepthwiseConv2D::InferShape(std::vector<lite::Tensor *> inputs_, std::vector
   int input_w = in_shape.at(2);
   int input_channel = in_shape.at(3);
   int output_w = 0, output_h = 0;

   input_channel_ = input_channel;
-  pad_l_ = GetPadLeft();
-  pad_u_ = GetPadUp();
-  pad_d_ = GetPadDown();
-  pad_r_ = GetPadRight();
   if (GetPadMode() == schema::PadMode_SAME_UPPER) {
     output_h = std::ceil(static_cast<float>(input_h) / static_cast<float>(GetStrideH()));
     output_w = std::ceil(static_cast<float>(input_w) / static_cast<float>(GetStrideW()));
@@ -4,3 +4,4 @@ mtk_face_features_v3.onnx
 ml_face_3d.onnx
 gts_version-RFB-320_simplified.onnx
 mnist-8.onnx
+crnn_lite_lstm_v2.onnx:32,32,32,1
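The new entry extends the plain file-name format of this list with an optional input-shape suffix after a colon. A minimal sketch of how the run scripts below split such a line (the sample value is simply the entry added above):

# Split a models_onnx.cfg entry into the model file name and the optional
# input-shape string that follows the colon.
line='crnn_lite_lstm_v2.onnx:32,32,32,1'
model_name=${line%:*}          # drop the ':...' suffix -> crnn_lite_lstm_v2.onnx
length=${#model_name}          # character count of the model name
input_shapes=${line:length+1}  # everything after the colon -> 32,32,32,1
echo "model:  ${model_name}"
echo "shapes: ${input_shapes}"

Entries without a colon still work: the suffix removal matches nothing and the shape string comes out empty.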
@@ -51,7 +51,7 @@ function Run_Converter() {

     # Convert onnx models:
     while read line; do
-        model_name=${line}
+        model_name=${line%:*}
         if [[ $model_name == \#* ]]; then
             continue
         fi
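The converter loop only needs the bare file name, so it now strips any shape suffix with ${line%:*}. A small sketch of that expansion for both kinds of entry:

# ${line%:*} removes the trailing ':...' part and is a no-op for entries
# without a colon, so the converter always receives a bare file name.
for line in 'crnn_lite_lstm_v2.onnx:32,32,32,1' 'mnist-8.onnx'; do
  echo "${line} -> ${line%:*}"
done
# crnn_lite_lstm_v2.onnx:32,32,32,1 -> crnn_lite_lstm_v2.onnx
# mnist-8.onnx -> mnist-8.onnx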
@@ -202,15 +202,17 @@ function Run_x86() {

     # Run onnx converted models:
     while read line; do
-        model_name=${line}
+        model_name=${line%:*}
+        length=${#model_name}
+        input_shapes=${line:length+1}
         if [[ $model_name == \#* ]]; then
             continue
         fi
         echo ${model_name} >> "${run_x86_log_file}"
         echo 'cd '${x86_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86} >> "${run_x86_log_file}"
         cd ${x86_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
-        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelFile='${ms_models_path}'/'${model_name}'.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out' >> "${run_x86_log_file}"
+        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelFile='${ms_models_path}'/'${model_name}'.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --inputShapes='${input_shapes}' --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out' >> "${run_x86_log_file}"
-        export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelFile=${ms_models_path}/${model_name}.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out >> "${run_x86_log_file}"
+        export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelFile=${ms_models_path}/${model_name}.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --inputShapes=${input_shapes} --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out >> "${run_x86_log_file}"
         if [ $? = 0 ]; then
             run_result='x86: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
         else
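Run_x86 additionally recovers the shape string and forwards it through --inputShapes. A sketch of what the two added lines produce for each kind of entry; whether the benchmark tool tolerates an empty --inputShapes value for shape-less entries is assumed here, not verified:

# Derive the --inputShapes argument from the tail of each entry; for a plain
# entry the shape string is empty and the flag is passed with no value.
for line in 'crnn_lite_lstm_v2.onnx:32,32,32,1' 'mnist-8.onnx'; do
  model_name=${line%:*}
  length=${#model_name}
  input_shapes=${line:length+1}
  echo "--modelFile=${model_name}.ms --inputShapes=${input_shapes}"
done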
@@ -245,7 +247,7 @@ function Run_x86() {
         echo ${model_name} >> "${run_x86_log_file}"
         echo 'cd '${x86_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86} >> "${run_x86_log_file}"
         cd ${x86_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
-        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelFile='${ms_models_path}'/'${model_name}'.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out' >> "${run_x86_log_file}"
+        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelFile='${ms_models_path}'/'${model_name}'.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out' >> "${run_x86_log_file}"
         export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelFile=${ms_models_path}/${model_name}.ms --inDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --benchmarkDataFile=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out >> "${run_x86_log_file}"
         if [ $? = 0 ]; then
             run_result='x86: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
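This second Run_x86 hunk only fixes quoting in the logged command: inside single quotes the shell does not expand ${model_name}, so the log previously showed the literal variable name in the --inDataFile path. A small demonstration:

# Single quotes suppress expansion; closing them around the variable lets the
# real model name appear in the logged path.
model_name='crnn_lite_lstm_v2.onnx'
echo 'input/${model_name}.ms.bin'    # prints: input/${model_name}.ms.bin
echo 'input/'${model_name}'.ms.bin'  # prints: input/crnn_lite_lstm_v2.onnx.ms.bin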
@@ -400,14 +402,16 @@ function Run_arm64() {

     # Run onnx converted models:
     while read line; do
-        model_name=${line}
+        model_name=${line%:*}
+        length=${#model_name}
+        input_shapes=${line:length+1}
         if [[ $model_name == \#* ]]; then
             continue
         fi
         echo ${model_name} >> "${run_arm64_log_file}"
         echo 'cd /data/local/tmp/benchmark_test' > adb_run_cmd.txt
-        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inDataFile=/data/local/tmp/input_output/input/'${model_name}'.ms.bin --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out' >> "${run_arm64_log_file}"
+        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inDataFile=/data/local/tmp/input_output/input/'${model_name}'.ms.bin --inputShapes='${input_shapes}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out' >> "${run_arm64_log_file}"
-        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inDataFile=/data/local/tmp/input_output/input/'${model_name}'.ms.bin --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out' >> adb_run_cmd.txt
+        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inDataFile=/data/local/tmp/input_output/input/'${model_name}'.ms.bin --inputShapes='${input_shapes}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out' >> adb_run_cmd.txt
         adb -s ${device_id} shell < adb_run_cmd.txt >> "${run_arm64_log_file}"
         if [ $? = 0 ]; then
             run_result='arm64: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
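After expansion, the command file pushed to the device for the new LSTM entry would contain roughly the following (a reconstruction from the echo statements above, assuming model_name=crnn_lite_lstm_v2.onnx and input_shapes=32,32,32,1):

# Approximate contents of adb_run_cmd.txt, which
# `adb -s ${device_id} shell < adb_run_cmd.txt` then executes on the device.
cd /data/local/tmp/benchmark_test
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile=crnn_lite_lstm_v2.onnx.ms --inDataFile=/data/local/tmp/input_output/input/crnn_lite_lstm_v2.onnx.ms.bin --inputShapes=32,32,32,1 --benchmarkDataFile=/data/local/tmp/input_output/output/crnn_lite_lstm_v2.onnx.ms.out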
@@ -417,8 +421,8 @@ function Run_arm64() {
         # run benchmark test without clib data
         echo ${model_name} >> "${run_arm64_log_file}"
         echo 'cd /data/local/tmp/benchmark_test' > adb_run_cmd.txt
-        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --warmUpLoopCount=1 --loopCount=2' >> "{run_arm64_log_file}"
+        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inputShapes='${input_shapes}' --warmUpLoopCount=1 --loopCount=2' >> "{run_arm64_log_file}"
-        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --warmUpLoopCount=1 --loopCount=2' >> adb_run_cmd.txt
+        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile='${model_name}'.ms --inputShapes='${input_shapes}' --warmUpLoopCount=1 --loopCount=2' >> adb_run_cmd.txt
         adb -s ${device_id} shell < adb_run_cmd.txt >> "${run_arm64_log_file}"
         if [ $? = 0 ]; then
             run_result='arm64: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
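For the calibration-free performance run the expanded command file is shorter; again reconstructed from the echoes above for the new entry:

# Approximate adb_run_cmd.txt for the run without calibration data:
cd /data/local/tmp/benchmark_test
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --modelFile=crnn_lite_lstm_v2.onnx.ms --inputShapes=32,32,32,1 --warmUpLoopCount=1 --loopCount=2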