From 42208998ecc8f83fecbba29416b83102fe9dd885 Mon Sep 17 00:00:00 2001
From: sunsuodong
Date: Fri, 2 Apr 2021 14:40:58 +0800
Subject: [PATCH] modify package name

---
 build.sh                                  | 12 +--
 mindspore/lite/CMakeLists.txt             | 12 +--
 mindspore/lite/test/run_benchmark_nets.sh | 15 ++--
 mindspore/lite/test/win_runtest.bat       | 91 +++++++++--------------
 scripts/lite_release_package.sh           | 63 ++++++++--------
 5 files changed, 77 insertions(+), 116 deletions(-)

diff --git a/build.sh b/build.sh
index 136a2a5c759..543e0336ee8 100755
--- a/build.sh
+++ b/build.sh
@@ -706,11 +706,7 @@ build_lite_java_arm32() {

 build_lite_java_x86() {
     # build mindspore-lite x86
-    if [[ "$X86_64_SIMD" == "sse" || "$X86_64_SIMD" == "avx" ]]; then
-        local JTARBALL=mindspore-lite-${VERSION_STR}-inference-linux-x64-${X86_64_SIMD}
-    else
-        local JTARBALL=mindspore-lite-${VERSION_STR}-inference-linux-x64
-    fi
+    local JTARBALL=mindspore-lite-${VERSION_STR}-inference-linux-x64
     if [[ "X$INC_BUILD" == "Xoff" ]] || [[ ! -f "${BASEPATH}/mindspore/lite/build/java/${JTARBALL}.tar.gz" ]]; then
         build_lite "x86_64" "off" ""
     fi
@@ -826,11 +822,6 @@ build_java() {
     zip -r mindspore-lite-maven-${VERSION_STR}.zip mindspore

     # build linux x86 jar
-    if [[ "$X86_64_SIMD" == "sse" || "$X86_64_SIMD" == "avx" ]]; then
-        local LINUX_X86_PACKAGE_NAME=mindspore-lite-${VERSION_STR}-inference-linux-x64-${X86_64_SIMD}-jar
-    else
-        local LINUX_X86_PACKAGE_NAME=mindspore-lite-${VERSION_STR}-inference-linux-x64-jar
-    fi
     check_java_home
     build_lite_java_x86
     build_jni_x86_64
@@ -845,6 +836,7 @@ build_java() {
     mkdir -p ${JAVA_PATH}/java/linux_x86/build/lib
     cp ${JAVA_PATH}/java/linux_x86/libs/*.so ${JAVA_PATH}/java/linux_x86/build/lib/jar
     cd ${JAVA_PATH}/java/linux_x86/build/
+    local LINUX_X86_PACKAGE_NAME=mindspore-lite-${VERSION_STR}-inference-linux-x64-jar
     cp -r ${JAVA_PATH}/java/linux_x86/build/lib ${JAVA_PATH}/java/linux_x86/build/${LINUX_X86_PACKAGE_NAME}
     tar czvf ${LINUX_X86_PACKAGE_NAME}.tar.gz ${LINUX_X86_PACKAGE_NAME}
     # copy output
diff --git a/mindspore/lite/CMakeLists.txt b/mindspore/lite/CMakeLists.txt
index e224496503b..6944514d8c3 100644
--- a/mindspore/lite/CMakeLists.txt
+++ b/mindspore/lite/CMakeLists.txt
@@ -88,17 +88,9 @@ else()
     elseif(PLATFORM_ARM32)
         set(RUNTIME_COMPONENT_NAME inference-android-aarch32)
     elseif(WIN32)
-        if("${X86_64_SIMD}" STREQUAL "off")
-            set(RUNTIME_COMPONENT_NAME inference-win-x64)
-        else()
-            set(RUNTIME_COMPONENT_NAME inference-win-x64-${X86_64_SIMD})
-        endif()
+        set(RUNTIME_COMPONENT_NAME inference-win-x64)
     else()
-        if("${X86_64_SIMD}" STREQUAL "off")
-            set(RUNTIME_COMPONENT_NAME inference-linux-x64)
-        else()
-            set(RUNTIME_COMPONENT_NAME inference-linux-x64-${X86_64_SIMD})
-        endif()
+        set(RUNTIME_COMPONENT_NAME inference-linux-x64)
     endif()
 endif()

diff --git a/mindspore/lite/test/run_benchmark_nets.sh b/mindspore/lite/test/run_benchmark_nets.sh
index 1a2a64a9f61..87e3d446699 100755
--- a/mindspore/lite/test/run_benchmark_nets.sh
+++ b/mindspore/lite/test/run_benchmark_nets.sh
@@ -679,9 +679,9 @@ function Run_x86() {

 # Run on x86 sse platform:
 function Run_x86_sse() {
-    cd ${x86_path} || exit 1
-    tar -zxf mindspore-lite-${version}-inference-linux-x64-sse.tar.gz || exit 1
-    cd ${x86_path}/mindspore-lite-${version}-inference-linux-x64-sse || return 1
+    cd ${x86_path}/sse || exit 1
+    tar -zxf mindspore-lite-${version}-inference-linux-x64.tar.gz || exit 1
+    cd ${x86_path}/sse/mindspore-lite-${version}-inference-linux-x64 || return 1
     export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./inference/lib:./inference/minddata/lib
     cp tools/benchmark/benchmark ./ || exit 1
@@ -951,9 +951,9 @@ function Run_x86_sse() {

 # Run on x86 avx platform:
 function Run_x86_avx() {
-    cd ${x86_path} || exit 1
-    tar -zxf mindspore-lite-${version}-inference-linux-x64-avx.tar.gz || exit 1
-    cd ${x86_path}/mindspore-lite-${version}-inference-linux-x64-avx || return 1
+    cd ${x86_path}/avx || exit 1
+    tar -zxf mindspore-lite-${version}-inference-linux-x64.tar.gz || exit 1
+    cd ${x86_path}/avx/mindspore-lite-${version}-inference-linux-x64 || return 1
     export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./inference/lib:./inference/minddata/lib
     cp tools/benchmark/benchmark ./ || exit 1
@@ -2043,7 +2043,7 @@ basepath=$(pwd)
 echo ${basepath}
 #set -e

-# Example:sh run_benchmark_nets.sh -r /home/temp_test -m /home/temp_test/models -d "8KE5T19620002408"
+# Example:sh run_benchmark_nets.sh -r /home/temp_test -m /home/temp_test/models -d "8KE5T19620002408" -e arm_cpu
 while getopts "r:m:d:e:" opt; do
     case ${opt} in
         r)
@@ -2071,7 +2071,6 @@ done

 # mkdir train
 x86_path=${release_path}/ubuntu_x86
-# mv ${x86_path}/*train-linux-x64* ./train
 file_name=$(ls ${x86_path}/*inference-linux-x64.tar.gz)
 IFS="-" read -r -a file_name_array <<< "$file_name"
 version=${file_name_array[2]}
diff --git a/mindspore/lite/test/win_runtest.bat b/mindspore/lite/test/win_runtest.bat
index 160caa023fc..ef9cd6a310c 100644
--- a/mindspore/lite/test/win_runtest.bat
+++ b/mindspore/lite/test/win_runtest.bat
@@ -16,57 +16,34 @@
 @title win_run_test

 setlocal enabledelayedexpansion
-SET BASEPATH=%CD%
-SET OUTPUT_PATH=%BASEPATH%\output
-SET TOOL_PATH=%1
-SET TOOL_PATH=%TOOL_PATH:"=%\windows_x64
+SET PACKAGE_PATH=%1
 SET MODEL_PATH_BASE=%2
-SET MODEL_PATH=%MODEL_PATH_BASE:"=%\models\hiai
-SET BENCHMARK_BASE=%BASEPATH:"=%\output\benchmark
-SET MODEL_CONFIG=%BASEPATH%\win_models.cfg
-
-for /f "tokens=3 delims=-" %%i in ('dir /b %TOOL_PATH%\mindspore-lite-*-inference-win-x64.zip') do set VERSION=%%i
-
-cd /d %BASEPATH%
-IF EXIST "%BASEPATH%/output" (
-    rd /s /q output
-)
-md output
-cd /d %OUTPUT_PATH%
-IF EXIST benchmark (
-    rd /s /q benchmark
-)
-md benchmark
+SET INSTRUCTION=%3
+SET BASEPATH=%CD%

 SET RET_CODE=0

-SET SO_PATH=mindspore-lite-%VERSION%-inference-win-x64
-IF "%3%" == "sse" (
-    SET SO_PATH=mindspore-lite-%VERSION%-inference-win-x64-sse
-)
-IF "%3%" == "avx" (
-    SET SO_PATH=mindspore-lite-%VERSION%-inference-win-x64-avx
-)
-IF "%3%" == "avx512" (
-    SET SO_PATH=mindspore-lite-%VERSION%-inference-win-x64-avx512
-)
-
-7z x -r "%TOOL_PATH%/%SO_PATH%.zip" -o"%BENCHMARK_BASE%"
+SET PACKAGE_PATH=%PACKAGE_PATH:"=%\windows_x64\%INSTRUCTION%
+7z x -r "%PACKAGE_PATH%\mindspore-lite-*.zip"
 IF NOT %errorlevel% == 0 (
     echo "Decompression of runtime tool fail!"
     SET RET_CODE=1
     goto run_eof
 )
-cd benchmark
-md ms
+for /f %%i in ('dir /b %PACKAGE_PATH%\mindspore-lite-*.zip') do set PACKAGE_NAME=%%i
+set PACKAGE_NAME=%PACKAGE_NAME:.zip=%
+SET DST_PACKAGE_PATH=%BASEPATH%\%PACKAGE_NAME%

+echo "Convert models"
+copy %DST_PACKAGE_PATH%\tools\converter\lib\* %DST_PACKAGE_PATH%\tools\converter\converter\
+cd /d %DST_PACKAGE_PATH%\tools\converter\converter\
 SET TYPE_ID=''
 SET MODEL_NAME=''
 SET SUFFIX=''
-
-echo "Convert models"
-copy %BENCHMARK_BASE%\%SO_PATH%\tools\converter\lib\* %BENCHMARK_BASE%\%SO_PATH%\tools\converter\converter\
-cd /d %BENCHMARK_BASE%\%SO_PATH%\tools\converter\converter\
+SET MODEL_CONFIG=%BASEPATH%\win_models.cfg
+SET MODEL_PATH=%MODEL_PATH_BASE:"=%\models\hiai
 for /f "tokens=1-2 delims= " %%i in (%MODEL_CONFIG%) do (
     for /f "tokens=1-2 delims=." %%k in ("%%j") do (
@@ -74,15 +51,14 @@ for /f "tokens=1-2 delims= " %%i in (%MODEL_CONFIG%) do (
         SET MODEL_NAME=%%k
         SET SUFFIX=%%l
     )
-
-    if !TYPE_ID!==1 (converter_lite --fmk=MINDIR --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!")
-    if !TYPE_ID!==2 (converter_lite --fmk=MINDIR --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!_train" --trainModel=true)
-    if !TYPE_ID!==3 (converter_lite --fmk=CAFFE --modelFile="%MODEL_PATH%/!MODEL_NAME!.prototxt" --weightFile="%MODEL_PATH%/!MODEL_NAME!.caffemodel" --outputFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!")
-    if !TYPE_ID!==4 (converter_lite --fmk=ONNX --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!")
-    if !TYPE_ID!==5 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!")
-    if !TYPE_ID!==6 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!" --quantType=AwareTraining)
-    if !TYPE_ID!==7 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!_posttraining" --quantType=PostTraining --configFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!_posttraining.config")
-    if !TYPE_ID!==8 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!_weightquant" --quantType=WeightQuant --bitNum=8 --quantWeightSize=500 --quantWeightChannel=16)
+    if !TYPE_ID!==1 (converter_lite --fmk=MINDIR --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!")
+    if !TYPE_ID!==2 (converter_lite --fmk=MINDIR --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!_train" --trainModel=true)
+    if !TYPE_ID!==3 (converter_lite --fmk=CAFFE --modelFile="%MODEL_PATH%/!MODEL_NAME!.prototxt" --weightFile="%MODEL_PATH%/!MODEL_NAME!.caffemodel" --outputFile="%DST_PACKAGE_PATH%\!MODEL_NAME!")
+    if !TYPE_ID!==4 (converter_lite --fmk=ONNX --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!")
+    if !TYPE_ID!==5 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!")
+    if !TYPE_ID!==6 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!" --quantType=AwareTraining)
+    if !TYPE_ID!==7 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!_posttraining" --quantType=PostTraining --configFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!_posttraining.config")
+    if !TYPE_ID!==8 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outputFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!_weightquant" --quantType=WeightQuant --bitNum=8 --quantWeightSize=500 --quantWeightChannel=16)

     IF !errorlevel! == 0 (
         echo "Run converter for model (TYPE_ID=!TYPE_ID!) !MODEL_NAME!.!SUFFIX! : pass!"
@@ -94,8 +70,9 @@ for /f "tokens=1-2 delims= " %%i in (%MODEL_CONFIG%) do (
 )

 echo "Run converted models"
-copy %BENCHMARK_BASE%\%SO_PATH%\inference\lib\* %BENCHMARK_BASE%\%SO_PATH%\tools\benchmark\
-cd /d %BENCHMARK_BASE%\%SO_PATH%\tools\benchmark\
+copy %DST_PACKAGE_PATH%\inference\lib\* %DST_PACKAGE_PATH%\tools\benchmark\
+cd /d %DST_PACKAGE_PATH%\tools\benchmark\
+
 SET INPUT_BASE=%MODEL_PATH%/input_output/input
 SET OUTPUT_BASE=%MODEL_PATH%/input_output/output

@@ -105,15 +82,14 @@ for /f "tokens=1-2 delims= " %%i in (%MODEL_CONFIG%) do (
         SET MODEL_NAME=%%k
         SET SUFFIX=%%l
     )
-
-    if !TYPE_ID!==1 (benchmark --modelFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out" --accuracyThreshold=1.5)
-    if !TYPE_ID!==2 (benchmark --modelFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!_train.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out" --accuracyThreshold=1.5)
-    if !TYPE_ID!==3 (benchmark --modelFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.ms.out")
-    if !TYPE_ID!==4 (benchmark --modelFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out")
-    if !TYPE_ID!==5 (benchmark --modelFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out")
-    if !TYPE_ID!==6 (benchmark --modelFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out")
-    if !TYPE_ID!==7 (benchmark --modelFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!_posttraining.ms" --inDataFile="%MODEL_PATH_BASE%/quantTraining/mnist_calibration_data/00099.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!_posttraining.ms.out")
-    if !TYPE_ID!==8 (benchmark --modelFile="%BENCHMARK_BASE%/ms/!MODEL_NAME!.!SUFFIX!_weightquant.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out")
+    if !TYPE_ID!==1 (benchmark --modelFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out" --accuracyThreshold=1.5)
+    if !TYPE_ID!==2 (benchmark --modelFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!_train.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out" --accuracyThreshold=1.5)
+    if !TYPE_ID!==3 (benchmark --modelFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.ms.out")
+    if !TYPE_ID!==4 (benchmark --modelFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out")
+    if !TYPE_ID!==5 (benchmark --modelFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out")
+    if !TYPE_ID!==6 (benchmark --modelFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out")
+    if !TYPE_ID!==7 (benchmark --modelFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!_posttraining.ms" --inDataFile="%MODEL_PATH_BASE%/quantTraining/mnist_calibration_data/00099.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!_posttraining.ms.out")
+    if !TYPE_ID!==8 (benchmark --modelFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!_weightquant.ms" --inDataFile="%INPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.bin" --benchmarkDataFile="%OUTPUT_BASE%/!MODEL_NAME!.!SUFFIX!.ms.out")

     IF !errorlevel! == 0 (
         echo "Run benchmark for model (TYPE_ID=!TYPE_ID!) !MODEL_NAME!.!SUFFIX! : pass!"
@@ -126,6 +102,7 @@ for /f "tokens=1-2 delims= " %%i in (%MODEL_CONFIG%) do (

 :run_eof
 cd /d %BASEPATH%
+rd /s /q %PACKAGE_NAME%
 IF %RET_CODE% == 0 (
     SET errorlevel=0
     echo "Run models in Windows success!"
diff --git a/scripts/lite_release_package.sh b/scripts/lite_release_package.sh
index 97301363db0..30d85fa7e36 100644
--- a/scripts/lite_release_package.sh
+++ b/scripts/lite_release_package.sh
@@ -48,40 +48,39 @@ function verify_every_file() {

 function android_release_package()
 {
-    for arch in "aarch32" "aarch64"
-    do
-        src_inference_pkg_name="mindspore-lite-${version}-inference-android-${arch}"
-        src_train_pkg_name="mindspore-lite-${version}-train-android-${arch}"
-        dst_pkg_name="mindspore-lite-${version}-android-${arch}"
+    arch=$1
+    device=$2
+    src_inference_pkg_name="mindspore-lite-${version}-inference-android-${arch}"
+    src_train_pkg_name="mindspore-lite-${version}-train-android-${arch}"
+    dst_pkg_name="mindspore-lite-${version}-android-${arch}"

-        rm -rf ${src_inference_pkg_name}
-        rm -rf ${src_train_pkg_name}
-        rm -rf ${dst_pkg_name}
-        tar -xzf ${input_path}/android_${arch}/${src_inference_pkg_name}.tar.gz
-        tar -xzf ${input_path}/android_${arch}/${src_train_pkg_name}.tar.gz
+    rm -rf ${src_inference_pkg_name}
+    rm -rf ${src_train_pkg_name}
+    rm -rf ${dst_pkg_name}
+    tar -xzf ${input_path}/android_${arch}/${device}/${src_inference_pkg_name}.tar.gz
+    tar -xzf ${input_path}/android_${arch}/${device}/${src_train_pkg_name}.tar.gz

-        cp -r ${src_train_pkg_name}/tools/benchmark_train/ ${src_inference_pkg_name}/tools/
-        cp -r ${src_train_pkg_name}/train/ ${src_inference_pkg_name}/
-        mkdir -p ${output_path}/release/android/
-        mv ${src_inference_pkg_name} ${dst_pkg_name}
-        # Copy java runtime to Android package
-        cp ${input_path}/aar/mindspore-lite-*maven*.zip ${dst_pkg_name}
-        tar -czf ${output_path}/release/android/${dst_pkg_name}.tar.gz ${dst_pkg_name}
-        cd ${output_path}/release/android/
-        sha256sum ${dst_pkg_name}.tar.gz > ${dst_pkg_name}.tar.gz.sha256
-        cd -
+    cp -r ${src_train_pkg_name}/tools/benchmark_train/ ${src_inference_pkg_name}/tools/
+    cp -r ${src_train_pkg_name}/train/ ${src_inference_pkg_name}/
+    mkdir -p ${output_path}/release/android/${device}/
+    mv ${src_inference_pkg_name} ${dst_pkg_name}
+    # Copy java runtime to Android package
+    cp ${input_path}/aar/avx/mindspore-lite-*maven*.zip ${dst_pkg_name}
+    tar -czf ${output_path}/release/android/${device}/${dst_pkg_name}.tar.gz ${dst_pkg_name}
+    cd ${output_path}/release/android/${device}/
+    sha256sum ${dst_pkg_name}.tar.gz > ${dst_pkg_name}.tar.gz.sha256
+    cd -

-        verify_every_file ${src_train_pkg_name}/tools/benchmark_train/ ${dst_pkg_name}
-        verify_every_file ${src_train_pkg_name}/train/ ${dst_pkg_name}
+    verify_every_file ${src_train_pkg_name}/tools/benchmark_train/ ${dst_pkg_name}
+    verify_every_file ${src_train_pkg_name}/train/ ${dst_pkg_name}

-        rm -rf ${src_train_pkg_name}
-        rm -rf ${dst_pkg_name}
-    done
+    rm -rf ${src_train_pkg_name}
+    rm -rf ${dst_pkg_name}
 }

 function linux_release_package()
 {
-    src_inference_pkg_name="mindspore-lite-${version}-inference-linux-x64-avx"
+    src_inference_pkg_name="mindspore-lite-${version}-inference-linux-x64"
     src_train_pkg_name="mindspore-lite-${version}-train-linux-x64"
     src_jar_pkg_name="mindspore-lite-${version}-inference-linux-x64-jar"
     dst_pkg_name="mindspore-lite-${version}-linux-x64"
@@ -90,9 +89,9 @@ function linux_release_package()
     rm -rf ${src_train_pkg_name}
     rm -rf ${src_jar_pkg_name}
     rm -rf ${dst_pkg_name}
-    tar -xzf ${input_path}/ubuntu_x86/${src_inference_pkg_name}.tar.gz
+    tar -xzf ${input_path}/ubuntu_x86/avx/${src_inference_pkg_name}.tar.gz
     tar -xzf ${input_path}/ubuntu_x86/${src_train_pkg_name}.tar.gz
-    tar -xzf ${input_path}/aar/${src_jar_pkg_name}.tar.gz
+    tar -xzf ${input_path}/aar/avx/${src_jar_pkg_name}.tar.gz

     cp -r ${src_train_pkg_name}/tools/benchmark_train/ ${src_inference_pkg_name}/tools/
     cp -r ${src_train_pkg_name}/train/ ${src_inference_pkg_name}/
@@ -115,12 +114,12 @@ function linux_release_package()

 function windows_release_package()
 {
-    src_inference_pkg_name="mindspore-lite-${version}-inference-win-x64-avx"
+    src_inference_pkg_name="mindspore-lite-${version}-inference-win-x64"
     dst_pkg_name="mindspore-lite-${version}-win-x64"

     rm -rf ${src_inference_pkg_name}
     rm -rf ${dst_pkg_name}
-    unzip ${input_path}/windows_x64/${src_inference_pkg_name}.zip
+    unzip ${input_path}/windows_x64/avx/${src_inference_pkg_name}.zip

     mv ${src_inference_pkg_name} ${dst_pkg_name}
     mkdir -p ${output_path}/release/windows/
@@ -138,7 +137,9 @@
 input_path=$1
 output_path=$2
 version=`ls ${input_path}/android_aarch64/mindspore-lite-*-inference-*.tar.gz | awk -F'/' '{print $NF}' | cut -d"-" -f3`

-android_release_package
+android_release_package aarch32
+android_release_package aarch64
+android_release_package aarch64 gpu
 linux_release_package
 windows_release_package