!20289 [MS][Lite][ToD] Add code example to CI tests

Merge pull request !20289 from ehaleva/CI_tests
i-robot 2021-07-19 03:13:28 +00:00 committed by Gitee
commit 034679f6f7
10 changed files with 152 additions and 58 deletions
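
In short: this change adds a Run_CodeExamples stage to the training CI runner so the train_lenet, unified_api and train_lenet_java examples are built and executed as part of the test run, and the example scripts gain a -m flag that accepts a pre-exported MindIR so CI can skip the Python/docker export step. An illustrative manual invocation from an example's directory (paths and the package version are placeholders, not values taken from this change):

    ./prepare_and_run.sh -D /path/to/mnist -r /path/to/mindspore-lite-<version>-linux-x64.tar.gz -t x86 -m /path/to/lenet_tod.mindir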

View File

@@ -7,7 +7,6 @@ set(CONVERTER_ROOT_DIR ${RUNTIME_PKG_NAME}/tools/converter)
 set(OBFUSCATOR_ROOT_DIR ${RUNTIME_PKG_NAME}/tools/obfuscator)
 set(CROPPER_ROOT_DIR ${RUNTIME_PKG_NAME}/tools/cropper)
 set(TEST_CASE_DIR ${TOP_DIR}/mindspore/lite/test/build)
-set(TEST_DIR ${TOP_DIR}/mindspore/lite/test)
 set(RUNTIME_DIR ${RUNTIME_PKG_NAME}/runtime)
 set(RUNTIME_INC_DIR ${RUNTIME_PKG_NAME}/runtime/include)

View File

@@ -1,13 +1,15 @@
 #!/bin/bash
-echo "============Exporting=========="
-rm -f lenet_tod.mindir
-if [ -n "$2" ]; then
-  DOCKER_IMG=$2
-  docker run -w $PWD --runtime=nvidia -v /home/$USER:/home/$USER --privileged=true ${DOCKER_IMG} /bin/bash -c "PYTHONPATH=../../../../../model_zoo/official/cv/lenet/src python lenet_export.py '$1'; chmod 444 lenet_tod.mindir; rm -rf __pycache__"
-else
-  echo "MindSpore docker was not provided, attempting to run locally"
-  PYTHONPATH=../../../../../model_zoo/official/cv/lenet/src python lenet_export.py $1
+if [[ -z ${EXPORT} ]]; then
+  echo "============Exporting=========="
+  rm -f lenet_tod.mindir
+  if [ -n "$2" ]; then
+    DOCKER_IMG=$2
+    docker run -w $PWD --runtime=nvidia -v /home/$USER:/home/$USER --privileged=true ${DOCKER_IMG} /bin/bash -c "PYTHONPATH=../../../../../model_zoo/official/cv/lenet/src python lenet_export.py '$1'; chmod 444 lenet_tod.mindir; rm -rf __pycache__"
+  else
+    echo "MindSpore docker was not provided, attempting to run locally"
+    PYTHONPATH=../../../../../model_zoo/official/cv/lenet/src python lenet_export.py $1
+  fi
 fi
@@ -33,5 +35,5 @@ if [[ ! -z ${QUANTIZE} ]]; then
   echo "Quantizing weights"
   QUANT_OPTIONS="--quantType=WeightQuant --bitNum=8 --quantWeightSize=100 --quantWeightChannel=15"
 fi
-LD_LIBRARY_PATH=./ $CONVERTER --fmk=MINDIR --trainModel=true --modelFile=lenet_tod.mindir --outputFile=lenet_tod $QUANT_OPTIONS
+LD_LIBRARY_PATH=./:${LD_LIBRARY_PATH} $CONVERTER --fmk=MINDIR --trainModel=true --modelFile=lenet_tod.mindir --outputFile=lenet_tod $QUANT_OPTIONS
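
The EXPORT guard above lets a caller skip the export stage entirely: when EXPORT is set to any non-empty value (prepare_and_run.sh uses DONT_EXPORT), the script assumes lenet_tod.mindir is already in place and goes straight to conversion. A minimal sketch, assuming the MindIR was produced earlier and already sits in model/; the batch argument value is a placeholder:

    cd model/
    EXPORT=DONT_EXPORT ./prepare_model.sh 1   # export block is skipped; only converter_lite runs, producing lenet_tod.ms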

View File

@@ -2,26 +2,42 @@
 display_usage()
 {
-  echo -e "\nUsage: prepare_and_run.sh -D dataset_path [-d mindspore_docker] [-r release.tar.gz] [-t arm64|x86] [-q] [-o] [-b virtual_batch]\n"
+  echo -e "\nUsage: prepare_and_run.sh -D dataset_path [-d mindspore_docker] [-r release.tar.gz] [-t arm64|x86] [-q] [-o] [-b virtual_batch] [-m mindir]\n"
 }
 
 checkopts()
 {
   TARGET="arm64"
   DOCKER=""
+  MINDIR_FILE=""
   MNIST_DATA_PATH=""
   QUANTIZE=""
-  ENABLEFP16=false
+  FP16_FLAG=""
   VIRTUAL_BATCH=-1
-  while getopts 'D:d:r:t:qob:' opt
+  while getopts 'D:b:d:m:oqr:t:' opt
   do
     case "${opt}" in
+      b)
+        VIRTUAL_BATCH=$OPTARG
+        ;;
       D)
         MNIST_DATA_PATH=$OPTARG
         ;;
       d)
         DOCKER=$OPTARG
         ;;
+      m)
+        MINDIR_FILE=$OPTARG
+        ;;
+      o)
+        FP16_FLAG="-o"
+        ;;
+      q)
+        QUANTIZE="QUANTIZE"
+        ;;
+      r)
+        TARBALL=$OPTARG
+        ;;
       t)
         if [ "$OPTARG" == "arm64" ] || [ "$OPTARG" == "x86" ]; then
          TARGET=$OPTARG
@@ -31,18 +47,6 @@ checkopts()
          exit 1
         fi
         ;;
-      r)
-        TARBALL=$OPTARG
-        ;;
-      q)
-        QUANTIZE="QUANTIZE"
-        ;;
-      o)
-        ENABLEFP16=true
-        ;;
-      b)
-        VIRTUAL_BATCH=$OPTARG
-        ;;
       *)
        echo "Unknown option ${opt}!"
        display_usage
@@ -81,10 +85,15 @@ else
   BATCH=1
 fi
 
+EXPORT=""
+if [ "$MINDIR_FILE" != "" ]; then
+  cp -f $MINDIR_FILE model/lenet_tod.mindir
+  EXPORT="DONT_EXPORT"
+fi
+
 cd model/ || exit 1
 rm -f *.ms
-QUANTIZE=${QUANTIZE} ./prepare_model.sh $BATCH $DOCKER || exit 1
+EXPORT=${EXPORT} QUANTIZE=${QUANTIZE} ./prepare_model.sh $BATCH $DOCKER || exit 1
 cd ../
 
 # Copy the .ms model to the package folder
@@ -109,6 +118,8 @@ fi
 rm -rf msl
 mv mindspore-* msl/
+rm -rf msl/tools/
+rm ${PACKAGE}/lib/*.a
 
 # Copy the dataset to the package
 cp -r $MNIST_DATA_PATH ${PACKAGE}/dataset || exit 1
@@ -127,21 +138,11 @@ if [ "${TARGET}" == "arm64" ]; then
   adb push ${PACKAGE} /data/local/tmp/
 
   echo "========Training on Device====="
-  if "$ENABLEFP16"; then
-    echo "Training fp16.."
-    adb shell "cd /data/local/tmp/package-arm64 && /system/bin/sh train.sh -o -b ${VIRTUAL_BATCH}"
-  else
-    adb shell "cd /data/local/tmp/package-arm64 && /system/bin/sh train.sh -b ${VIRTUAL_BATCH}"
-  fi
+  adb shell "cd /data/local/tmp/package-arm64 && /system/bin/sh train.sh ${FP16_FLAG} -b ${VIRTUAL_BATCH}"
   echo
 
   echo "===Evaluating trained Model====="
-  if "$ENABLEFP16"; then
-    echo "Evaluating fp16 Model.."
-    adb shell "cd /data/local/tmp/package-arm64 && /system/bin/sh eval.sh -o"
-  else
-    adb shell "cd /data/local/tmp/package-arm64 && /system/bin/sh eval.sh"
-  fi
+  adb shell "cd /data/local/tmp/package-arm64 && /system/bin/sh eval.sh ${FP16_FLAG}"
   echo
 else
   cd ${PACKAGE} || exit 1
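
With the options consolidated, one invocation now covers fp16, virtual batching and a pre-exported MindIR; FP16_FLAG simply carries the literal "-o" through to train.sh and eval.sh, which is what allows the duplicated if/else adb branches above to collapse into single commands. An illustrative run against an attached arm64 device (paths, version and batch value are placeholders):

    ./prepare_and_run.sh -D /path/to/mnist -r /path/to/mindspore-lite-<version>.tar.gz -t arm64 -o -b 16 -m /path/to/lenet_tod.mindir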

View File

@@ -1,17 +1,19 @@
 #!/bin/bash
-echo "============Exporting=========="
-if [ -n "$1" ]; then
-  DOCKER_IMG=$1
-  docker run -w $PWD --runtime=nvidia -v /home/$USER:/home/$USER --privileged=true ${DOCKER_IMG} /bin/bash -c "PYTHONPATH=../../../../../model_zoo/official/cv/lenet/src python lenet_export.py; chmod 444 lenet_tod.mindir; rm -rf __pycache__"
-else
-  echo "MindSpore docker was not provided, attempting to run locally"
-  PYTHONPATH=../../../../../model_zoo/official/cv/lenet/src python lenet_export.py
-fi
-
-if [ ! -f "$CONVERTER" ]; then
-  echo "converter_lite could not be found in MindSpore build directory nor in system path"
-  exit 1
+if [[ -z ${EXPORT} ]]; then
+  echo "============Exporting=========="
+  if [ -n "$1" ]; then
+    DOCKER_IMG=$1
+    docker run -w $PWD --runtime=nvidia -v /home/$USER:/home/$USER --privileged=true ${DOCKER_IMG} /bin/bash -c "PYTHONPATH=../../../../../model_zoo/official/cv/lenet/src python lenet_export.py; chmod 444 lenet_tod.mindir; rm -rf __pycache__"
+  else
+    echo "MindSpore docker was not provided, attempting to run locally"
+    PYTHONPATH=../../../../../model_zoo/official/cv/lenet/src python lenet_export.py
+  fi
+
+  if [ ! -f "$CONVERTER" ]; then
+    echo "converter_lite could not be found in MindSpore build directory nor in system path"
+    exit 1
+  fi
 fi
 
 echo "============Converting========="

View File

@@ -18,14 +18,15 @@
 display_usage()
 {
-  echo -e "\nUsage: prepare_and_run.sh -D dataset_path [-d mindspore_docker] [-r release.tar.gz]\n"
+  echo -e "\nUsage: prepare_and_run.sh -D dataset_path [-d mindspore_docker] [-r release.tar.gz] [-m mindir]\n"
 }
 
 checkopts()
 {
   DOCKER=""
+  MINDIR_FILE=""
   MNIST_DATA_PATH=""
-  while getopts 'D:d:r:' opt
+  while getopts 'D:d:m:r:' opt
   do
     case "${opt}" in
       D)
@@ -34,6 +35,9 @@ checkopts()
       d)
         DOCKER=$OPTARG
         ;;
+      m)
+        MINDIR_FILE=$OPTARG
+        ;;
       r)
         TARBALL="-r $OPTARG"
         ;;
@@ -56,12 +60,18 @@ fi
 BASEPATH=$(cd "$(dirname $0)" || exit; pwd)
 
+EXPORT=""
+if [ "$MINDIR_FILE" != "" ]; then
+  cp -f $MINDIR_FILE model/lenet_tod.mindir
+  EXPORT="DONT_EXPORT"
+fi
+
 cd model/ || exit 1
 MSLITE_LINUX=$(ls -d ${BASEPATH}/build/mindspore-lite-*-linux-x64)
 CONVERTER=${MSLITE_LINUX}/tools/converter/converter/converter_lite
 rm -f *.ms
 LD_LIBRARY_PATH=${MSLITE_LINUX}/tools/converter/lib/:${MSLITE_LINUX}/tools/converter/third_party/glog/lib
-LD_LIBRARY_PATH=${LD_LIBRARY_PATH} CONVERTER=${CONVERTER} ./prepare_model.sh $DOCKER || exit 1
+EXPORT=${EXPORT} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} CONVERTER=${CONVERTER} ./prepare_model.sh $DOCKER || exit 1
 cd ../
 
 cd target || exit 1
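
For the unified_api example the -m flag is plumbed the same way as in train_lenet: the given MindIR is copied over model/lenet_tod.mindir and EXPORT=DONT_EXPORT is forwarded to prepare_model.sh. Spelled out manually, it is roughly equivalent to the following sketch (the source path is a placeholder):

    cp -f /path/to/lenet_tod.mindir model/lenet_tod.mindir
    cd model/
    EXPORT=DONT_EXPORT LD_LIBRARY_PATH=${LD_LIBRARY_PATH} CONVERTER=${CONVERTER} ./prepare_model.sh
    cd ../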

View File

@@ -35,5 +35,5 @@ if [[ ! -z ${QUANTIZE} ]]; then
   echo "Quantizing weights"
   QUANT_OPTIONS="--quantType=WeightQuant --bitNum=8 --quantWeightSize=100 --quantWeightChannel=15"
 fi
-LD_LIBRARY_PATH=./ $CONVERTER --fmk=MINDIR --trainModel=true --modelFile=lenet_tod.mindir --outputFile=lenet_tod $QUANT_OPTIONS
+LD_LIBRARY_PATH=./:${LD_LIBRARY_PATH} $CONVERTER --fmk=MINDIR --trainModel=true --modelFile=lenet_tod.mindir --outputFile=lenet_tod $QUANT_OPTIONS
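
This LD_LIBRARY_PATH change (and the identical one in the train_lenet prepare_model.sh earlier) stops the converter call from discarding the library paths handed down by the calling script; "./" is now prepended rather than used as the whole search path, presumably so converter_lite can still resolve its converter and glog libraries. Roughly, reading the two scripts together:

    # prepare_and_run.sh sets the converter/glog library dirs and passes them down:
    LD_LIBRARY_PATH=${MSLITE_LINUX}/tools/converter/lib/:${MSLITE_LINUX}/tools/converter/third_party/glog/lib
    LD_LIBRARY_PATH=${LD_LIBRARY_PATH} CONVERTER=${CONVERTER} ./prepare_model.sh $DOCKER
    # prepare_model.sh now keeps that value instead of overriding it with only "./":
    LD_LIBRARY_PATH=./:${LD_LIBRARY_PATH} $CONVERTER --fmk=MINDIR --trainModel=true --modelFile=lenet_tod.mindir --outputFile=lenet_tod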

View File

@@ -114,6 +114,8 @@ fi
 rm -rf msl
 mv mindspore-* msl/
+rm -rf msl/tools/
+rm ${PACKAGE}/lib/*.a
 
 # Copy the dataset to the package
 cp -r $MNIST_DATA_PATH ${PACKAGE}/dataset || exit 1

View File

@@ -113,9 +113,9 @@ class MSTensor::Impl {
       return empty;
     }
     auto shape = lite_tensor_->shape();
-    lite_shape.resize(shape.size());
-    std::transform(shape.begin(), shape.end(), lite_shape.begin(), [](int c) { return static_cast<int64_t>(c); });
-    return lite_shape;
+    lite_shape_.resize(shape.size());
+    std::transform(shape.begin(), shape.end(), lite_shape_.begin(), [](int c) { return static_cast<int64_t>(c); });
+    return lite_shape_;
   }
 
   virtual std::shared_ptr<Impl> Clone() const { return nullptr; }
@@ -221,7 +221,7 @@ class MSTensor::Impl {
  private:
   tensor::MSTensor *lite_tensor_ = nullptr;
   std::string tensor_name_ = "";
-  mutable std::vector<int64_t> lite_shape;
+  mutable std::vector<int64_t> lite_shape_;
   bool own_data_ = false;
   bool from_session_ = false;
 };

View File

@@ -342,6 +342,60 @@ ENDM
     return ${fail}
 }
 
+function Run_CodeExamples() {
+    ls ${basepath}/../../
+    fail=0
+    target="x86"
+    tarball_path=${x86_path}/mindspore-lite-${version}-linux-x64.tar.gz
+    if [[ $backend == "arm64_train" ]]; then
+        target="arm64"
+        tarball_path=${arm64_path}/mindspore-lite-${version_arm64}-android-aarch64.tar.gz
+        export ANDROID_SERIAL=${device_id}
+    fi
+    export PATH=${x86_path}/mindspore-lite-${version}-linux-x64/tools/converter/converter/:$PATH
+    export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${x86_path}/mindspore-lite-${version}-linux-x64/tools/converter/lib/:${x86_path}/mindspore-lite-${version}-linux-x64/tools/converter/third_party/glog/lib
+
+    if [[ $backend == "all" || $backend == "x86-all" || $backend == "x86-java" ]]; then
+        cd ${basepath}/../../examples/train_lenet_java || exit 1
+        chmod 777 ./prepare_and_run.sh
+        ./prepare_and_run.sh -D ${datasets_path}/mnist -r ${tarball_path} -m ${models_path}/code_example.mindir >> ${run_code_examples_log_file}
+        accurate=$(tail -10 ${run_code_examples_log_file} | awk -F= 'NF==2 && /accuracy/ { sum += $2} END { print (sum > 0.95) }')
+        cd -
+    fi
+
+    if [[ $backend == "all" || $backend == "train" || $backend == "x86_train" || $backend == "codegen&train" || $backend == "arm64_train" ]]; then
+        cd ${basepath}/../../examples/unified_api || exit 1
+        chmod 777 ./prepare_and_run.sh
+        chmod 777 ./*/*.sh
+        ./prepare_and_run.sh -D ${datasets_path}/mnist -r ${tarball_path} -t ${target} -m ${models_path}/code_example.mindir >> ${run_code_examples_log_file}
+        accurate=$(tail -20 ${run_code_examples_log_file} | awk 'NF==3 && /Accuracy is/ { sum += $3} END { print (sum > 1.9) }')
+        if [ $accurate -eq 1 ]; then
+            echo "Unified API Trained and reached accuracy" >> ${run_code_examples_log_file}
+        else
+            echo "Unified API demo failure" >> ${run_code_examples_log_file}
+            fail=1
+        fi
+        rm -rf package*/dataset
+        cd -
+
+        cd ${basepath}/../../examples/train_lenet || exit 1
+        chmod 777 ./prepare_and_run.sh
+        chmod 777 ./*/*.sh
+        ./prepare_and_run.sh -D ${datasets_path}/mnist -r ${tarball_path} -t ${target} -m ${models_path}/code_example.mindir >> ${run_code_examples_log_file}
+        accurate=$(tail -10 ${run_code_examples_log_file} | awk 'NF==3 && /Accuracy is/ { sum += $3} END { print (sum > 1.9) }')
+        if [ $accurate -eq 1 ]; then
+            echo "Lenet Trained and reached accuracy" >> ${run_code_examples_log_file}
+        else
+            echo "Train Lenet demo failure" >> ${run_code_examples_log_file}
+            fail=1
+        fi
+        rm -rf package*/dataset
+        cd -
+    fi
+    return ${fail}
+}
+
 function Print_Result() {
     MS_PRINT_TESTCASE_END_MSG
     while read line; do
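
The pass/fail decision of Run_CodeExamples is derived from the example logs. Reading the awk filters above: the train_lenet and unified_api demos are expected to print lines of the form "Accuracy is <value>" (exactly three fields), whose values are summed and compared against 1.9, while the Java demo is expected to print "accuracy=<value>" lines summed against 0.95. The exact log wording is an assumption inferred from those filters, not something this diff defines. For example:

    # hypothetical log excerpt piped through the same check; prints 1 (pass)
    printf 'Accuracy is 0.97\nAccuracy is 0.98\n' | awk 'NF==3 && /Accuracy is/ { sum += $3 } END { print (sum > 1.9) }'
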
@@ -419,6 +473,8 @@ if [[ $train_io_path == "" ]]; then
 fi
 echo $train_io_path
 
+datasets_path=${models_path}/../datasets/
+
 arm64_path=${release_path}/android_aarch64/npu
 file=$(ls ${arm64_path}/*android-aarch64.tar.gz)
 file_name="${file##*/}"
@@ -512,6 +568,9 @@ adb_push_arm32_log_file=${logs_path}/adb_push_arm32_log.txt
 adb_cmd_arm32_file=${logs_path}/adb_arm32_cmd.txt
 adb_cmd_arm32_run_file=${logs_path}/adb_arm32_cmd_run.txt
 
+run_code_examples_log_file=${logs_path}/run_code_examples_log.txt
+echo 'run code example logs: ' > ${run_code_examples_log_file}
+
 # Copy the MindSpore models:
 echo "Push files to benchmark_train_test folder and run benchmark_train"
 benchmark_train_test_path=${basepath}/benchmark_train_test
@@ -528,6 +587,14 @@ if [[ $backend == "all" || $backend == "train" || $backend == "x86_train" || $ba
     Run_x86_PID=$!
     sleep 1
 fi
+
+if [[ $backend == "all" || $backend == "train" || $backend == "x86_train" || $backend == "x86-java" || $backend == "codegen&train" || $backend == "arm64_train" ]]; then
+    # Run Code Examples
+    echo "Start Code Examples ..."
+    Run_CodeExamples &
+    Run_CodeExamples_status=$?
+    Run_CodeExamples_PID=$!
+    sleep 1
+fi
 if [[ $backend == "all" || $backend == "train" || $backend == "arm64_train" || $backend == "codegen&train" ]]; then
     # Run on arm64
     echo "Start Run arm64 ..."
@@ -554,6 +621,17 @@ if [[ $backend == "all" || $backend == "train" || $backend == "x86_train" || $ba
         isFailed=1
     fi
 fi
+
+if [[ $backend == "all" || $backend == "train" || $backend == "x86_train" || $backend == "x86-java" || $backend == "codegen&train" || $backend == "arm64_train" ]]; then
+    wait ${Run_CodeExamples_PID}
+    Run_CodeExamples_status=$?
+    if [[ ${Run_CodeExamples_status} != 0 ]];then
+        echo "Run CodeExamples failed"
+        cat ${run_code_examples_log_file}
+        isFailed=1
+    fi
+fi
+
 if [[ $backend == "all" || $backend == "train" || $backend == "arm64_train" || $backend == "codegen&train" ]]; then
 # wait ${Run_arm64_PID}
 # Run_arm64_status=$?