lite build: unify ToD and IoD package names

This commit is contained in:
zhengjun10 2021-06-14 16:49:16 +08:00
parent ea3d92c2ec
commit 23790f0435
23 changed files with 582 additions and 290 deletions
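The release package name no longer encodes the ToD (train) or IoD (inference) variant: the -inference- / -train- infixes are dropped, the package's top-level inference/ directory becomes runtime/, and MSLITE_ENABLE_TRAIN now defaults to on, so a single package carries both the inference runtime and the train libraries. Scripts that used to branch on a *-train-* tarball name now detect train support by checking for libmindspore-lite-train.so inside the package, and the Windows lite build passes -DMSLITE_ENABLE_TRAIN=off explicitly. A sketch of the renaming, using a hypothetical 1.3.0 version string for illustration:

    before: mindspore-lite-1.3.0-inference-linux-x64.tar.gz   (inference/lib, inference/include)
            mindspore-lite-1.3.0-train-linux-x64.tar.gz
    after:  mindspore-lite-1.3.0-linux-x64.tar.gz             (runtime/lib, runtime/include)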

View File

@@ -55,7 +55,7 @@ IF "%1%" == "lite" (
echo "======Start building MindSpore Lite %VERSION_MAJOR%.%VERSION_MINOR%.%VERSION_REVISION%======"
rd /s /q "%BASE_PATH%\output"
(git log -1 | findstr "^commit") > %BUILD_PATH%\.commit_id
cmake -DPLATFORM_X86_64=on -DBUILD_MINDDATA=off ^
cmake -DPLATFORM_X86_64=on -DBUILD_MINDDATA=off -DMSLITE_ENABLE_TRAIN=off ^
-DMS_VERSION_MAJOR=%VERSION_MAJOR% -DMS_VERSION_MINOR=%VERSION_MINOR% -DMS_VERSION_REVISION=%VERSION_REVISION% ^
-DCMAKE_BUILD_TYPE=Release -G "CodeBlocks - MinGW Makefiles" "%BASE_PATH%/mindspore/lite"
) ELSE (

View File

@@ -492,25 +492,25 @@ write_commit_file() {
build_lite_x86_64_jni_and_jar()
{
# copy x86 so
local inference_or_train=inference
local is_train=off
local is_train=on
cd ${BASEPATH}/output/tmp
if [ -f "mindspore-lite-${VERSION_STR}-train-linux-x64.tar.gz" ]; then
inference_or_train=train
is_train=on
fi
local pkg_name=mindspore-lite-${VERSION_STR}-${inference_or_train}-linux-x64
local pkg_name=mindspore-lite-${VERSION_STR}-linux-x64
cd ${BASEPATH}/output/tmp/
rm -rf ${pkg_name}
tar -zxf ${BASEPATH}/output/tmp/${pkg_name}.tar.gz
rm -rf ${LITE_JAVA_PATH}/java/linux_x86/libs/ && mkdir -pv ${LITE_JAVA_PATH}/java/linux_x86/libs/
rm -rf ${LITE_JAVA_PATH}/native/libs/linux_x86/ && mkdir -pv ${LITE_JAVA_PATH}/native/libs/linux_x86/
cp ./${pkg_name}/inference/lib/*.so* ${LITE_JAVA_PATH}/java/linux_x86/libs/
cp ./${pkg_name}/inference/lib/*.so* ${LITE_JAVA_PATH}/native/libs/linux_x86/
if [ -f "mindspore-lite-${VERSION_STR}-train-linux-x64.tar.gz" ]; then
cp ./${pkg_name}/inference/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/java/linux_x86/libs/
cp ./${pkg_name}/inference/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/native/libs/linux_x86/
cp ./${pkg_name}/runtime/lib/*.so* ${LITE_JAVA_PATH}/java/linux_x86/libs/
cp ./${pkg_name}/runtime/lib/*.so* ${LITE_JAVA_PATH}/native/libs/linux_x86/
local train_so=$pkg_name/runtime/lib/libmindspore-lite-train.so
if [ ! -f "$train_so" ]; then
echo "${train_so} does not exist"
is_train=off
fi
if [[ "X$is_train" = "Xon" ]]; then
cp ./${pkg_name}/runtime/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/java/linux_x86/libs/
cp ./${pkg_name}/runtime/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/native/libs/linux_x86/
fi
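# With the unified package name there is no separate *-train-* tarball to key off,
# so train support is now detected from the package contents: is_train stays on
# only if runtime/lib contains libmindspore-lite-train.so.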
# build jni so
cd ${BASEPATH}/mindspore/lite/build
@@ -525,11 +525,11 @@ build_lite_x86_64_jni_and_jar()
fi
cp ./libmindspore-lite-jni.so ${LITE_JAVA_PATH}/java/linux_x86/libs/
cp ./libmindspore-lite-jni.so ${LITE_JAVA_PATH}/native/libs/linux_x86/
cp ./libmindspore-lite-jni.so ${BASEPATH}/output/tmp/${pkg_name}/inference/lib/
cp ./libmindspore-lite-jni.so ${BASEPATH}/output/tmp/${pkg_name}/runtime/lib/
if [[ "X$is_train" = "Xon" ]]; then
cp ./libmindspore-lite-train-jni.so ${LITE_JAVA_PATH}/java/linux_x86/libs/
cp ./libmindspore-lite-train-jni.so ${LITE_JAVA_PATH}/native/libs/linux_x86/
cp ./libmindspore-lite-train-jni.so ${BASEPATH}/output/tmp/${pkg_name}/inference/lib/
cp ./libmindspore-lite-train-jni.so ${BASEPATH}/output/tmp/${pkg_name}/runtime/lib/
fi
# build java common
@@ -542,7 +542,7 @@ build_lite_x86_64_jni_and_jar()
cd ${LITE_JAVA_PATH}/java/linux_x86/
gradle clean
gradle releaseJar
cp ./build/lib/jar/*.jar ${BASEPATH}/output/tmp/${pkg_name}/inference/lib/
cp ./build/lib/jar/*.jar ${BASEPATH}/output/tmp/${pkg_name}/runtime/lib/
# package
cd ${BASEPATH}/output/tmp
@@ -666,24 +666,24 @@ build_lite_arm64_and_jni() {
# build arm64
build_lite "arm64"
# copy arm64 so
local inference_or_train=inference
local is_train=off
if [ -f "${BASEPATH}/output/mindspore-lite-${VERSION_STR}-train-android-aarch64.tar.gz" ]; then
inference_or_train=train
is_train=on
fi
local pkg_name=mindspore-lite-${VERSION_STR}-${inference_or_train}-android-aarch64
local is_train=on
local pkg_name=mindspore-lite-${VERSION_STR}-android-aarch64
cd "${BASEPATH}/mindspore/lite/build"
rm -rf ${pkg_name}
tar -zxf ${BASEPATH}/output/${pkg_name}.tar.gz
rm -rf ${LITE_JAVA_PATH}/java/app/libs/arm64-v8a/ && mkdir -p ${LITE_JAVA_PATH}/java/app/libs/arm64-v8a/
rm -rf ${LITE_JAVA_PATH}/native/libs/arm64-v8a/ && mkdir -p ${LITE_JAVA_PATH}/native/libs/arm64-v8a/
cp ./${pkg_name}/${inference_or_train}/lib/*.so* ${LITE_JAVA_PATH}/java/app/libs/arm64-v8a/
cp ./${pkg_name}/${inference_or_train}/lib/*.so* ${LITE_JAVA_PATH}/native/libs/arm64-v8a/
if [ -f "${BASEPATH}/output/mindspore-lite-${VERSION_STR}-train-android-aarch64.tar.gz" ]; then
cp ./${pkg_name}/train/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/java/app/libs/arm64-v8a/
cp ./${pkg_name}/train/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/native/libs/arm64-v8a/
cp ./${pkg_name}/runtime/lib/*.so* ${LITE_JAVA_PATH}/java/app/libs/arm64-v8a/
cp ./${pkg_name}/runtime/lib/*.so* ${LITE_JAVA_PATH}/native/libs/arm64-v8a/
local train_so=$pkg_name/runtime/lib/libmindspore-lite-train.so
if [ ! -f "$train_so" ]; then
echo "${train_so} does not exist"
is_train=off
fi
if [[ "X$is_train" = "Xon" ]]; then
cp ./${pkg_name}/runtime/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/java/app/libs/arm64-v8a/
cp ./${pkg_name}/runtime/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/native/libs/arm64-v8a/
fi
# build jni so
[ -n "${BASEPATH}" ] && rm -rf java/jni && mkdir -pv java/jni
@@ -710,24 +710,24 @@ build_lite_arm32_and_jni() {
# build arm32
build_lite "arm32"
# copy arm32 so
local inference_or_train=inference
local is_train=off
if [ -f "${BASEPATH}/output/mindspore-lite-${VERSION_STR}-train-android-aarch32.tar.gz" ]; then
inference_or_train=train
is_train=on
fi
local pkg_name=mindspore-lite-${VERSION_STR}-${inference_or_train}-android-aarch32
local is_train=on
local pkg_name=mindspore-lite-${VERSION_STR}-android-aarch32
cd "${BASEPATH}/mindspore/lite/build"
rm -rf ${pkg_name}
tar -zxf ${BASEPATH}/output/${pkg_name}.tar.gz
rm -rf ${LITE_JAVA_PATH}/java/app/libs/armeabi-v7a/ && mkdir -pv ${LITE_JAVA_PATH}/java/app/libs/armeabi-v7a/
rm -rf ${LITE_JAVA_PATH}/native/libs/armeabi-v7a/ && mkdir -pv ${LITE_JAVA_PATH}/native/libs/armeabi-v7a/
cp ./${pkg_name}/${inference_or_train}/lib/*.so* ${LITE_JAVA_PATH}/java/app/libs/armeabi-v7a/
cp ./${pkg_name}/${inference_or_train}/lib/*.so* ${LITE_JAVA_PATH}/native/libs/armeabi-v7a/
if [ -f "${BASEPATH}/output/mindspore-lite-${VERSION_STR}-train-android-aarch32.tar.gz" ]; then
cp ./${pkg_name}/train/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/java/app/libs/armeabi-v7a/
cp ./${pkg_name}/train/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/native/libs/armeabi-v7a/
cp ./${pkg_name}/runtime/lib/*.so* ${LITE_JAVA_PATH}/java/app/libs/armeabi-v7a/
cp ./${pkg_name}/runtime/lib/*.so* ${LITE_JAVA_PATH}/native/libs/armeabi-v7a/
local train_so=$pkg_name/runtime/lib/libmindspore-lite-train.so
if [ ! -f "$train_so" ]; then
echo "${train_so} does not exist"
is_train=off
fi
if [[ "X$is_train" = "Xon" ]]; then
cp ./${pkg_name}/runtime/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/java/app/libs/armeabi-v7a/
cp ./${pkg_name}/runtime/third_party/libjpeg-turbo/lib/*.so* ${LITE_JAVA_PATH}/native/libs/armeabi-v7a/
fi
# build jni so

View File

@@ -8,12 +8,12 @@ set(OBFUSCATOR_ROOT_DIR ${RUNTIME_PKG_NAME}/tools/obfuscator)
set(CROPPER_ROOT_DIR ${RUNTIME_PKG_NAME}/tools/cropper)
set(TEST_CASE_DIR ${TOP_DIR}/mindspore/lite/test/build)
set(RUNTIME_DIR ${RUNTIME_PKG_NAME}/inference)
set(RUNTIME_INC_DIR ${RUNTIME_PKG_NAME}/inference/include)
set(RUNTIME_LIB_DIR ${RUNTIME_PKG_NAME}/inference/lib)
set(MIND_DATA_INC_DIR ${RUNTIME_PKG_NAME}/inference/include/dataset)
set(TURBO_DIR ${RUNTIME_PKG_NAME}/inference/third_party/libjpeg-turbo)
set(SECUREC_DIR ${RUNTIME_PKG_NAME}/inference/third_party/securec)
set(RUNTIME_DIR ${RUNTIME_PKG_NAME}/runtime)
set(RUNTIME_INC_DIR ${RUNTIME_PKG_NAME}/runtime/include)
set(RUNTIME_LIB_DIR ${RUNTIME_PKG_NAME}/runtime/lib)
set(MIND_DATA_INC_DIR ${RUNTIME_PKG_NAME}/runtime/include/dataset)
set(TURBO_DIR ${RUNTIME_PKG_NAME}/runtime/third_party/libjpeg-turbo)
set(SECUREC_DIR ${RUNTIME_PKG_NAME}/runtime/third_party/securec)
set(MINDSPORE_LITE_LIB_NAME libmindspore-lite)
set(MINDSPORE_CORE_LIB_NAME libmindspore_core)
set(BENCHMARK_NAME benchmark)

View File

@@ -21,7 +21,7 @@ endif()
# Options that can be configured through environment variables or manually
set(MSLITE_GPU_BACKEND "opencl" CACHE STRING "enable gpu backend, only arm64 support, opencl/vulkan/cuda/off")
option(MSLITE_ENABLE_NPU "enable npu, only arm64 or arm32 support" on)
option(MSLITE_ENABLE_TRAIN "enable train" off)
option(MSLITE_ENABLE_TRAIN "enable train" on)
option(MSLITE_ENABLE_SSE "enable SSE instruction set, only x86_64 support" off)
option(MSLITE_ENABLE_AVX "enable AVX instruction set, only x86_64 support" off)
option(MSLITE_ENABLE_CONVERTER "enable converter, only x86_64 support" on)
@@ -128,23 +128,20 @@ add_compile_options(-fPIC)
if(SUPPORT_TRAIN)
set(BUILD_MINDDATA "full")
set(TRAIN_OR_INFERENCE "train")
else()
set(TRAIN_OR_INFERENCE "inference")
endif()
if(PLATFORM_ARM64)
set(RUNTIME_COMPONENT_NAME ${TRAIN_OR_INFERENCE}-android-aarch64)
set(RUNTIME_COMPONENT_NAME "android-aarch64")
elseif(PLATFORM_ARM32)
set(RUNTIME_COMPONENT_NAME ${TRAIN_OR_INFERENCE}-android-aarch32)
set(RUNTIME_COMPONENT_NAME "android-aarch32")
elseif(WIN32)
if(CMAKE_SIZEOF_VOID_P EQUAL 4)
set(RUNTIME_COMPONENT_NAME ${TRAIN_OR_INFERENCE}-win-x86)
set(RUNTIME_COMPONENT_NAME "win-x86")
else()
set(RUNTIME_COMPONENT_NAME ${TRAIN_OR_INFERENCE}-win-x64)
set(RUNTIME_COMPONENT_NAME "win-x64")
endif()
else()
set(RUNTIME_COMPONENT_NAME ${TRAIN_OR_INFERENCE}-linux-x64)
set(RUNTIME_COMPONENT_NAME "linux-x64")
endif()
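# The component name now carries only the platform, so the packaged artifact is
# mindspore-lite-<version>-<platform>.tar.gz (e.g. linux-x64) whether or not
# train support is compiled in.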
string(REPLACE "/mindspore/lite" "" TOP_DIR ${CMAKE_CURRENT_SOURCE_DIR})

View File

@@ -37,8 +37,8 @@ if [ ! -e ${BASEPATH}/build/${MINDSPORE_FILE} ]; then
wget -c -O ${BASEPATH}/build/${MINDSPORE_FILE} --no-check-certificate ${MINDSPORE_LITE_DOWNLOAD_URL}
fi
tar xzvf ${BASEPATH}/build/${MINDSPORE_FILE} -C ${BASEPATH}/build/
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/inference/lib/libmindspore-lite.a ${BASEPATH}/lib
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/inference/include ${BASEPATH}/
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/lib/libmindspore-lite.a ${BASEPATH}/lib
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/include ${BASEPATH}/
cd ${BASEPATH}/build || exit
cmake ${BASEPATH}
make

View File

@@ -37,7 +37,7 @@ if [ ! -e ${BASEPATH}/build/${MINDSPORE_FILE} ]; then
wget -c -O ${BASEPATH}/build/${MINDSPORE_FILE} --no-check-certificate ${MINDSPORE_LITE_DOWNLOAD_URL}
fi
tar xzvf ${BASEPATH}/build/${MINDSPORE_FILE} -C ${BASEPATH}/build/
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/inference/lib/jar/* ${BASEPATH}/lib
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/lib/jar/* ${BASEPATH}/lib
cd ${BASEPATH}/ || exit
mvn package

View File

@@ -89,10 +89,10 @@ if [ ! -e ${BASEPATH}/build/${MINDSPORE_FILE} ]; then
wget -c -O ${BASEPATH}/build/${MINDSPORE_FILE} --no-check-certificate ${MINDSPORE_LITE_DOWNLOAD_URL}
fi
tar xzvf ${BASEPATH}/build/${MINDSPORE_FILE} -C ${BASEPATH}/build/
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/inference/lib/libmindspore-lite.a ${BASEPATH}/lib
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/inference/include ${BASEPATH}/
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/lib/libmindspore-lite.a ${BASEPATH}/lib
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/include ${BASEPATH}/
if [[ "X${DEVICE}" == "Xnpu" ]]; then
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/inference/third_party/hiai_ddk/lib/*.so ${BASEPATH}/lib
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/third_party/hiai_ddk/lib/*.so ${BASEPATH}/lib
fi
cd ${BASEPATH}/build || exit
cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" -DANDROID_NATIVE_API_LEVEL="19" \

View File

@@ -27,7 +27,7 @@ endif()
get_filename_component(PKG_PATH ${PKG_PATH} ABSOLUTE BASE_DIR ${CMAKE_CURRENT_BINARY_DIR})
set(HEADER_PATH ${PKG_PATH}/inference)
set(HEADER_PATH ${PKG_PATH}/runtime)
option(PLATFORM_ARM64 "build android arm64" OFF)
option(PLATFORM_ARM32 "build android arm32" OFF)
@@ -89,10 +89,10 @@ endif()
get_filename_component(PKG_PATH ${PKG_PATH} ABSOLUTE BASE_DIR ${CMAKE_CURRENT_BINARY_DIR})
set(OP_LIB ${PKG_PATH}/inference/lib/libmindspore-lite.a)
set(OP_LIB ${PKG_PATH}/runtime/lib/libmindspore-lite.a)
set(WRAPPER_LIB ${PKG_PATH}/tools/codegen/lib/libwrapper.a)
set(OP_HEADER_PATH ${PKG_PATH}/tools/codegen/include)
set(HEADER_PATH ${PKG_PATH}/inference)
set(HEADER_PATH ${PKG_PATH}/runtime)
message(STATUS "operator lib path: ${OP_LIB}")
message(STATUS "operator header path: ${OP_HEADER_PATH}")

View File

@@ -8,7 +8,7 @@ endif()
get_filename_component(PKG_PATH ${PKG_PATH} ABSOLUTE BASE_DIR ${CMAKE_CURRENT_BINARY_DIR})
set(HEADER_PATH ${PKG_PATH}/inference)
set(HEADER_PATH ${PKG_PATH}/runtime)
option(MICRO_BUILD_ARM64 "build android arm64" OFF)
option(MICRO_BUILD_ARM32A "build android arm32" OFF)

View File

@@ -10,7 +10,7 @@ get_filename_component(PKG_PATH ${PKG_PATH} ABSOLUTE BASE_DIR ${CMAKE_CURRENT_BINARY_DIR})
set(OP_LIB ${PKG_PATH}/tools/codegen/operator_library/lib/libops.a)
set(OP_HEADER_PATH ${PKG_PATH}/tools/codegen/operator_library/include)
set(HEADER_PATH ${PKG_PATH}/inference)
set(HEADER_PATH ${PKG_PATH}/runtime)
message("operator lib path: ${OP_LIB}")
message("operator header path: ${OP_HEADER_PATH}")

View File

@@ -8,7 +8,7 @@ endif()
get_filename_component(PKG_PATH ${PKG_PATH} ABSOLUTE BASE_DIR ${CMAKE_CURRENT_BINARY_DIR})
set(HEADER_PATH ${PKG_PATH}/inference)
set(HEADER_PATH ${PKG_PATH}/runtime)
option(PLATFORM_ARM64 "build android arm64" OFF)
option(PLATFORM_ARM32 "build android arm32" OFF)

View File

@@ -8,10 +8,10 @@ endif()
get_filename_component(PKG_PATH ${PKG_PATH} ABSOLUTE BASE_DIR ${CMAKE_CURRENT_BINARY_DIR})
set(OP_LIB ${PKG_PATH}/inference/lib/libmindspore-lite.a)
set(OP_LIB ${PKG_PATH}/runtime/lib/libmindspore-lite.a)
set(WRAPPER_LIB ${PKG_PATH}/tools/codegen/lib/libwrapper.a)
set(OP_HEADER_PATH ${PKG_PATH}/tools/codegen/include)
set(HEADER_PATH ${PKG_PATH}/inference)
set(HEADER_PATH ${PKG_PATH}/runtime)
message("operator lib path: ${OP_LIB}")
message("operator header path: ${OP_HEADER_PATH}")

View File

@@ -4,7 +4,7 @@ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror -Wall -fPIC -std=c++17")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-sign-compare")
set(MS_DIR "${CMAKE_CURRENT_SOURCE_DIR}/mindspore-lite-1.2.0-inference-linux-x64/inference")
set(MS_DIR "${CMAKE_CURRENT_SOURCE_DIR}/mindspore-lite-1.2.0-linux-x64/runtime")
include_directories(${MS_DIR})

View File

@@ -2,6 +2,6 @@ deeplabv3.r1.1.mindir 1.5
mobilenetv2.r1.1.mindir 0.5
ssd.r1.1.mindir 0.5
ssd_ghostnet.r1.1.mindir 2.0
lenet_quant.mindir 0.5
# lenet_quant.mindir 0.5
deeplabv3_fzy.mindir 1.5
mobilenetv2_fzy.mindir 0.5

View File

@@ -61,14 +61,10 @@ echo 'run common ut tests'
./lite-test --gtest_filter="PassRegistryTest.TestRegistry"
# test cases specific for train
if [[ $1 == train ]]; then
echo 'run train ut tests'
## ./lite-test --gtest_filter=NetworkTest.efficient_net
## ./lite-test --gtest_filter="NetworkTest.tuning_layer"
## ./lite-test --gtest_filter="NetworkTest.lenetnet"
fi
if [[ $1 == inference ]]; then
echo 'run inference ut tests'
./lite-test --gtest_filter="ControlFlowTest.TestMergeWhileModel"
fi
echo 'run train ut tests'
## ./lite-test --gtest_filter=NetworkTest.efficient_net
## ./lite-test --gtest_filter="NetworkTest.tuning_layer"
## ./lite-test --gtest_filter="NetworkTest.lenetnet"
echo 'run inference ut tests'
./lite-test --gtest_filter="ControlFlowTest.TestMergeWhileModel"

View File

@@ -35,8 +35,8 @@ function Run_Export(){
# Run converter on x86 platform:
function Run_Converter() {
cd ${x86_path} || exit 1
tar -zxf mindspore-lite-${version}-train-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-train-linux-x64/ || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-linux-x64/ || exit 1
cp tools/converter/converter/converter_lite ./ || exit 1
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./tools/converter/lib/:./tools/converter/third_party/glog/lib
@@ -73,8 +73,8 @@ function Run_Converter() {
# Run on x86 platform:
function Run_x86() {
cd ${x86_path}/mindspore-lite-${version}-train-linux-x64 || return 1
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./inference/lib:./inference/third_party/libjpeg-turbo/lib
cd ${x86_path}/mindspore-lite-${version}-linux-x64 || return 1
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./runtime/lib:./runtime/third_party/libjpeg-turbo/lib
# Run mindspore converted train models:
fail=0
while read line; do
@@ -137,24 +137,24 @@ function Run_arm() {
# Unzip
cd ${arm_path} || exit 1
tar -zxf mindspore-lite-${version_arm}-train-android-${process_unit}.tar.gz || exit 1
tar -zxf mindspore-lite-${version_arm}-android-${process_unit}.tar.gz || exit 1
# If build with minddata, copy the minddata related libs
cd ${benchmark_train_test_path} || exit 1
if [ -f ${arm_path}/mindspore-lite-${version_arm}-train-android-${process_unit}/inference/lib/libminddata-lite.so ]; then
cp -a ${arm_path}/mindspore-lite-${version_arm}-train-android-${process_unit}/inference/third_party/libjpeg-turbo/lib/libjpeg.so* ${benchmark_train_test_path}/ || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-train-android-${process_unit}/inference/third_party/libjpeg-turbo/lib/libturbojpeg.so* ${benchmark_train_test_path}/ || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-train-android-${process_unit}/inference/lib/libminddata-lite.so ${benchmark_train_test_path}/libminddata-lite.so || exit 1
if [ -f ${arm_path}/mindspore-lite-${version_arm}-android-${process_unit}/runtime/lib/libminddata-lite.so ]; then
cp -a ${arm_path}/mindspore-lite-${version_arm}-android-${process_unit}/runtime/third_party/libjpeg-turbo/lib/libjpeg.so* ${benchmark_train_test_path}/ || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-android-${process_unit}/runtime/third_party/libjpeg-turbo/lib/libturbojpeg.so* ${benchmark_train_test_path}/ || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-android-${process_unit}/runtime/lib/libminddata-lite.so ${benchmark_train_test_path}/libminddata-lite.so || exit 1
fi
if [ "$1" == arm64 ] || [ "$1" == arm32 ]; then
cp -a ${arm_path}/mindspore-lite-${version_arm}-train-android-${process_unit}/inference/third_party/hiai_ddk/lib/libhiai.so ${benchmark_train_test_path}/libhiai.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-train-android-${process_unit}/inference/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_train_test_path}/libhiai_ir.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-train-android-${process_unit}/inference/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_train_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-android-${process_unit}/runtime/third_party/hiai_ddk/lib/libhiai.so ${benchmark_train_test_path}/libhiai.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-android-${process_unit}/runtime/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_train_test_path}/libhiai_ir.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-android-${process_unit}/runtime/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_train_test_path}/libhiai_ir_build.so || exit 1
fi
cp -a ${arm_path}/mindspore-lite-${version_arm}-train-android-${process_unit}/inference/lib/libmindspore-lite.so ${benchmark_train_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-train-android-${process_unit}/inference/lib/libmindspore-lite-train.so ${benchmark_train_test_path}/libmindspore-lite-train.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-train-android-${process_unit}/tools/benchmark_train/benchmark_train ${benchmark_train_test_path}/benchmark_train || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-android-${process_unit}/runtime/lib/libmindspore-lite.so ${benchmark_train_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-android-${process_unit}/runtime/lib/libmindspore-lite-train.so ${benchmark_train_test_path}/libmindspore-lite-train.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version_arm}-android-${process_unit}/tools/benchmark_train/benchmark_train ${benchmark_train_test_path}/benchmark_train || exit 1
# adb push all needed files to the phone
adb -s ${device_id} push ${benchmark_train_test_path} /data/local/tmp/ > ${adb_push_log_file}
@@ -316,19 +316,19 @@ fi
echo $train_io_path
arm64_path=${release_path}/android_aarch64
file=$(ls ${arm64_path}/*train-android-aarch64.tar.gz)
file=$(ls ${arm64_path}/*android-aarch64.tar.gz)
file_name="${file##*/}"
IFS="-" read -r -a file_name_array <<< "$file_name"
version_arm64=${file_name_array[2]}
arm32_path=${release_path}/android_aarch32
file=$(ls ${arm32_path}/*train-android-aarch32.tar.gz)
file=$(ls ${arm32_path}/*android-aarch32.tar.gz)
file_name="${file##*/}"
IFS="-" read -r -a file_name_array <<< "$file_name"
version_arm32=${file_name_array[2]}
x86_path=${release_path}/ubuntu_x86
file=$(ls ${x86_path}/*train-linux-x64.tar.gz)
file=$(ls ${x86_path}/*linux-x64.tar.gz)
file_name="${file##*/}"
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}

View File

@@ -4,8 +4,8 @@
function Run_Converter() {
# Unzip x86 runtime and converter
cd ${x86_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-inference-linux-x64/ || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-linux-x64/ || exit 1
cp tools/converter/converter/converter_lite ./ || exit 1
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./tools/converter/lib/:./tools/converter/third_party/glog/lib
@@ -95,7 +95,7 @@ function Run_Converter() {
fi
done < ${models_mindspore_config}
# Convert mindspore train models:
# Convert mindspore quant train models:
while read line; do
model_name=${line}
if [[ $model_name == \#* ]]; then
@@ -111,6 +111,32 @@ function Run_Converter() {
fi
done < ${models_mindspore_train_config}
rm -rf ${ms_train_models_path}
mkdir -p ${ms_train_models_path}
# Convert mindspore train models:
while read line; do
IFS=" " read -r -a line_array <<< ${line}
WEIGHT_QUANT=""
model_prefix=${line_array[0]}'_train'
model_name=${line_array[0]}'_train'
if [[ $model_name == \#* ]]; then
continue
fi
if [[ "${line_array[1]}" == "weight_quant" ]]; then
WEIGHT_QUANT="--quantType=WeightQuant --bitNum=8 --quantWeightSize=0 --quantWeightChannel=0"
model_name=${line_array[0]}'_train_quant'
fi
echo ${model_name} >> "${run_converter_log_file}"
echo './converter_lite --fmk=MINDIR --modelFile='${train_models_path}'/'${model_prefix}'.mindir --outputFile='${ms_train_models_path}'/'${model_name}' --trainModel=true' ${WEIGHT_QUANT} >> "${run_converter_log_file}"
./converter_lite --fmk=MINDIR --modelFile=${train_models_path}/${model_prefix}.mindir --outputFile=${ms_train_models_path}/${model_name} --trainModel=true ${WEIGHT_QUANT}
if [ $? = 0 ]; then
converter_result='converter mindspore_train '${model_name}' pass';echo ${converter_result} >> ${run_converter_result_file}
else
converter_result='converter mindspore_train '${model_name}' failed';echo ${converter_result} >> ${run_converter_result_file};return 1
fi
done < ${models_ms_train_config}
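# For reference, each line of models_ms_train.cfg has the form
# "<model> [weight_quant <accuracy_limit>]"; hypothetical examples (not from this commit):
#   mini_alexnet
#   mini_alexnet weight_quant 2.0
# The optional second field enables 8-bit weight quantization (the output is renamed
# to <model>_train_quant); the third field is read later as the accuracy threshold.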
# Convert TFLite PostTraining models:
while read line; do
posttraining_line_info=${line}
@@ -424,9 +450,9 @@ function Run_Converter() {
function Run_arm64_codegen() {
echo "ANDROID_NDK: ${ANDROID_NDK}" >> ${run_arm64_fp32_codegen_log_file}
cd ${arm64_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-android-aarch64.tar.gz || exit 1
local PKG_PATH=${arm64_path}/mindspore-lite-${version}-inference-android-aarch64
local CODEGEN_PATH=${x86_path}/mindspore-lite-${version}-inference-linux-x64/tools/codegen
tar -zxf mindspore-lite-${version}-android-aarch64.tar.gz || exit 1
local PKG_PATH=${arm64_path}/mindspore-lite-${version}-android-aarch64
local CODEGEN_PATH=${x86_path}/mindspore-lite-${version}-linux-x64/tools/codegen
rm -rf ${build_path}
mkdir -p ${build_path}
@@ -512,9 +538,9 @@ function Run_arm64_codegen() {
function Run_arm32_codegen() {
echo "ANDROID_NDK: ${ANDROID_NDK}" >> ${run_arm32_fp32_codegen_log_file}
cd ${arm32_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-android-aarch32.tar.gz || exit 1
local PKG_PATH=${arm32_path}/mindspore-lite-${version}-inference-android-aarch32
local CODEGEN_PATH=${x86_path}/mindspore-lite-${version}-inference-linux-x64/tools/codegen
tar -zxf mindspore-lite-${version}-android-aarch32.tar.gz || exit 1
local PKG_PATH=${arm32_path}/mindspore-lite-${version}-android-aarch32
local CODEGEN_PATH=${x86_path}/mindspore-lite-${version}-linux-x64/tools/codegen
rm -rf ${build_path}
mkdir -p ${build_path}
@@ -600,27 +626,36 @@ function Run_arm32_codegen() {
# Run on arm64 platform:
function Run_arm64() {
cd ${arm64_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-android-aarch64.tar.gz || exit 1
tar -zxf mindspore-lite-${version}-android-aarch64.tar.gz || exit 1
# If build with minddata, copy the minddata related libs
cd ${benchmark_test_path} || exit 1
if [ -f ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/lib/libminddata-lite.so ]; then
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
if [ -f ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libminddata-lite.so ]; then
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/libjpeg-turbo/lib/libjpeg.so* ${benchmark_test_path}/ || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/libjpeg-turbo/lib/libturbojpeg.so* ${benchmark_test_path}/ || exit 1
fi
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libmindspore-lite-train.so ${benchmark_test_path}/libmindspore-lite-train.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/tools/benchmark_train/benchmark_train ${benchmark_train_test_path}/benchmark_train || exit 1
# adb push all needed files to the phone
adb -s ${device_id} push ${benchmark_test_path} /data/local/tmp/ > adb_push_log.txt
adb -s ${device_id} push ${benchmark_test_path} /data/local/tmp/ > adb_push_log.txt
cp -a ${benchmark_test_path}/lib*so* ${benchmark_train_test_path}/
adb -s ${device_id} push ${benchmark_train_test_path} /data/local/tmp/ > adb_push_log.txt
# run adb, run session, check the result:
echo 'cd /data/local/tmp/benchmark_test' > adb_cmd.txt
echo 'cp /data/local/tmp/libc++_shared.so ./' >> adb_cmd.txt
echo 'chmod 777 benchmark' >> adb_cmd.txt
echo 'cd /data/local/tmp/benchmark_train_test' >> adb_cmd.txt
echo 'cp /data/local/tmp/libc++_shared.so ./' >> adb_cmd.txt
echo 'chmod 777 benchmark_train' >> adb_cmd.txt
adb -s ${device_id} shell < adb_cmd.txt
@@ -1017,31 +1052,91 @@ function Run_arm64() {
run_result='arm64: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; return 1
fi
done < ${models_for_process_only_config}
fail=0
# Run mindir converted train models:
tmp_dir=/data/local/tmp/benchmark_train_test
while read line; do
IFS=" " read -r -a line_array <<< ${line}
model_prefix=${line_array[0]}
model_name=${line_array[0]}'_train'
accuracy_limit=0.5
if [[ $model_name == \#* ]]; then
continue
fi
if [[ "${line_array[1]}" == "weight_quant" ]]; then
model_name=${line_array[0]}'_train_quant'
accuracy_limit=${line_array[2]}
fi
export_file="${tmp_dir}/${model_name}_tod"
inference_file="${tmp_dir}/${model_name}_infer"
# run benchmark_train test with calib data
echo ${model_name} >> "${run_arm64_fp32_log_file}"
adb -s ${device_id} push ${train_io_path}/${model_prefix}_input*.bin ${train_io_path}/${model_prefix}_output*.bin /data/local/tmp/benchmark_train_test >> adb_push_log.txt
echo 'cd /data/local/tmp/benchmark_train_test' > adb_run_cmd.txt
echo 'chmod 777 benchmark_train' >> adb_run_cmd.txt
adb -s ${device_id} shell < adb_run_cmd.txt >> ${run_arm64_fp32_log_file}
echo "rm -f ${export_file}* ${inference_file}*" >> ${run_arm64_fp32_log_file}
echo "rm -f ${export_file}* ${inference_file}*" >> adb_run_cmd.txt
adb -s ${device_id} shell < adb_run_cmd.txt >> ${run_arm64_fp32_log_file}
adb_cmd=$(cat <<-ENDM
export LD_LIBRARY_PATH=./:/data/local/tmp/:/data/local/tmp/benchmark_train_test;./benchmark_train \
--epochs=${epoch_num} \
--modelFile=${model_name}.ms \
--inDataFile=${tmp_dir}/${model_prefix}_input \
--expectedDataFile=${tmp_dir}/${model_prefix}_output \
--numThreads=${threads} \
--accuracyThreshold=${accuracy_limit} \
--inferenceFile=${inference_file} \
--exportFile=${export_file}
ENDM
)
echo "${adb_cmd}" >> ${run_arm64_fp32_log_file}
echo "${adb_cmd}" >> adb_run_cmd.txt
adb -s ${device_id} shell < adb_run_cmd.txt >> ${run_arm64_fp32_log_file}
# TODO: change to arm_type
if [ $? = 0 ]; then
run_result='arm64_train: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_train_result_file}
else
run_result='arm64_train: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_train_result_file};
fail=1
fi
done < ${models_ms_train_config}
return ${fail}
}
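# For a hypothetical model "mini_alexnet" with the defaults above (epoch_num=1,
# threads=2, accuracy_limit=0.5), the heredoc expands to roughly:
#   export LD_LIBRARY_PATH=./:/data/local/tmp/:/data/local/tmp/benchmark_train_test;./benchmark_train \
#     --epochs=1 --modelFile=mini_alexnet_train.ms \
#     --inDataFile=/data/local/tmp/benchmark_train_test/mini_alexnet_input \
#     --expectedDataFile=/data/local/tmp/benchmark_train_test/mini_alexnet_output \
#     --numThreads=2 --accuracyThreshold=0.5 \
#     --inferenceFile=/data/local/tmp/benchmark_train_test/mini_alexnet_train_infer \
#     --exportFile=/data/local/tmp/benchmark_train_test/mini_alexnet_train_tod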
# Run on arm32 platform:
function Run_arm32() {
cd ${arm32_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-android-aarch32.tar.gz || exit 1
tar -zxf mindspore-lite-${version}-android-aarch32.tar.gz || exit 1
# If build with minddata, copy the minddata related libs
cd ${benchmark_test_path} || exit 1
if [ -f ${arm32_path}/mindspore-lite-${version}-inference-android-aarch32/inference/lib/libminddata-lite.so ]; then
cp -a ${arm32_path}/mindspore-lite-${version}-inference-android-aarch32/inference/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
if [ -f ${arm32_path}/mindspore-lite-${version}-android-aarch32/runtime/lib/libminddata-lite.so ]; then
cp -a ${arm32_path}/mindspore-lite-${version}-android-aarch32/runtime/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-android-aarch32/runtime/third_party/libjpeg-turbo/lib/libturbojpeg.so* ${benchmark_test_path}/ || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-android-aarch32/runtime/third_party/libjpeg-turbo/lib/libjpeg.so* ${benchmark_test_path}/ || exit 1
fi
cp -a ${arm32_path}/mindspore-lite-${version}-inference-android-aarch32/inference/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-inference-android-aarch32/inference/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-inference-android-aarch32/inference/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-inference-android-aarch32/inference/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-inference-android-aarch32/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-android-aarch32/runtime/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-android-aarch32/runtime/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-android-aarch32/runtime/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-android-aarch32/runtime/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-android-aarch32/runtime/lib/libmindspore-lite-train.so ${benchmark_test_path}/libmindspore-lite-train.so || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-android-aarch32/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
cp -a ${arm32_path}/mindspore-lite-${version}-android-aarch32/tools/benchmark_train/benchmark_train ${benchmark_train_test_path}/benchmark_train || exit 1
# adb push all needed files to the phone
adb -s ${device_id} push ${benchmark_test_path} /data/local/tmp/ > adb_push_log.txt
# train ms files may be the same, so push them to a different folder
cp -a ${benchmark_test_path}/lib*so* ${benchmark_train_test_path}/
adb -s ${device_id} push ${benchmark_train_test_path} /data/local/tmp/ > adb_push_log.txt
# run adb, run session, check the result:
echo 'cd /data/local/tmp/benchmark_test' > adb_cmd.txt
echo 'cp /data/local/tmp/arm32/libc++_shared.so ./' >> adb_cmd.txt
echo 'chmod 777 benchmark' >> adb_cmd.txt
echo 'cd /data/local/tmp/benchmark_train_test' >> adb_cmd.txt
echo 'cp /data/local/tmp/arm32/libc++_shared.so ./' >> adb_cmd.txt
echo 'chmod 777 benchmark_train' >> adb_cmd.txt
adb -s ${device_id} shell < adb_cmd.txt
@@ -1073,24 +1168,75 @@ function Run_arm32() {
run_result='arm32: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; return 1
fi
done < ${models_arm32_config}
fail=0
# Run mindir converted train models:
tmp_dir=/data/local/tmp/benchmark_train_test
while read line; do
IFS=" " read -r -a line_array <<< ${line}
model_prefix=${line_array[0]}
model_name=${line_array[0]}'_train'
accuracy_limit=0.5
if [[ $model_name == \#* ]]; then
continue
fi
if [[ "${line_array[1]}" == "weight_quant" ]]; then
model_name=${line_array[0]}'_train_quant'
accuracy_limit=${line_array[2]}
fi
export_file="${tmp_dir}/${model_name}_tod"
inference_file="${tmp_dir}/${model_name}_infer"
# run benchmark_train test without calib data
echo ${model_name} >> "${run_arm32_log_file}"
adb -s ${device_id} push ${train_io_path}/${model_prefix}_input*.bin ${train_io_path}/${model_prefix}_output*.bin /data/local/tmp/benchmark_train_test >> adb_push_log.txt
echo 'cd /data/local/tmp/benchmark_train_test' > adb_run_cmd.txt
echo 'chmod 777 benchmark_train' >> adb_run_cmd.txt
adb -s ${device_id} shell < adb_run_cmd.txt >> ${run_arm32_log_file}
echo "rm -f ${export_file}* ${inference_file}*" >> ${run_arm32_log_file}
echo "rm -f ${export_file}* ${inference_file}*" >> adb_run_cmd.txt
adb -s ${device_id} shell < adb_run_cmd.txt >> ${run_arm32_log_file}
adb_cmd=$(cat <<-ENDM
export LD_LIBRARY_PATH=./:/data/local/tmp/:/data/local/tmp/benchmark_train_test;./benchmark_train \
--epochs=${epoch_num} \
--modelFile=${model_name}.ms \
--inDataFile=${tmp_dir}/${model_prefix}_input \
--expectedDataFile=${tmp_dir}/${model_prefix}_output \
--numThreads=${threads} \
--accuracyThreshold=${accuracy_limit} \
--inferenceFile=${inference_file} \
--exportFile=${export_file}
ENDM
)
echo "${adb_cmd}" >> ${run_arm32_log_file}
echo "${adb_cmd}" >> adb_run_cmd.txt
adb -s ${device_id} shell < adb_run_cmd.txt >> ${run_arm32_log_file}
# TODO: change to arm_type
if [ $? = 0 ]; then
run_result='arm32_train: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_train_result_file}
else
run_result='arm32_train: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_train_result_file};
fail=1
fi
done < ${models_ms_train_config}
return ${fail}
}
# Run on arm64-fp16 platform:
function Run_arm64_fp16() {
cd ${arm64_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-android-aarch64.tar.gz || exit 1
tar -zxf mindspore-lite-${version}-android-aarch64.tar.gz || exit 1
# If build with minddata, copy the minddata related libs
cd ${benchmark_test_path} || exit 1
if [ -f ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/lib/libminddata-lite.so ]; then
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
if [ -f ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libminddata-lite.so ]; then
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
fi
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
# adb push all needed files to the phone
adb -s ${device_id} push ${benchmark_test_path} /data/local/tmp/ > adb_push_log.txt
@@ -1250,18 +1396,18 @@ function Run_arm64_fp16() {
# Run on armv8.2-a32-fp16 platform:
function Run_armv82_a32_fp16() {
cd ${armv82_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-android-aarch32.tar.gz || exit 1
tar -zxf mindspore-lite-${version}-android-aarch32.tar.gz || exit 1
# If build with minddata, copy the minddata related libs
cd ${benchmark_test_path} || exit 1
if [ -f ${armv82_path}/mindspore-lite-${version}-inference-android-aarch32/inference/minddata/lib/libminddata-lite.so ]; then
cp -a ${armv82_path}/mindspore-lite-${version}-inference-android-aarch32/inference/minddata/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
if [ -f ${armv82_path}/mindspore-lite-${version}-android-aarch32/runtime/minddata/lib/libminddata-lite.so ]; then
cp -a ${armv82_path}/mindspore-lite-${version}-android-aarch32/runtime/minddata/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
fi
cp -a ${armv82_path}/mindspore-lite-${version}-inference-android-aarch32/inference/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${armv82_path}/mindspore-lite-${version}-inference-android-aarch32/inference/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${armv82_path}/mindspore-lite-${version}-inference-android-aarch32/inference/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${armv82_path}/mindspore-lite-${version}-inference-android-aarch32/inference/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${armv82_path}/mindspore-lite-${version}-inference-android-aarch32/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
cp -a ${armv82_path}/mindspore-lite-${version}-android-aarch32/runtime/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${armv82_path}/mindspore-lite-${version}-android-aarch32/runtime/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${armv82_path}/mindspore-lite-${version}-android-aarch32/runtime/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${armv82_path}/mindspore-lite-${version}-android-aarch32/runtime/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${armv82_path}/mindspore-lite-${version}-android-aarch32/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
# adb push all needed files to the phone
adb -s ${device_id} push ${benchmark_test_path} /data/local/tmp/ > adb_push_log.txt
@@ -1454,12 +1600,24 @@ function Print_Benchmark_Result() {
MS_PRINT_TESTCASE_END_MSG
}
function Print_Benchmark_Train_Result() {
MS_PRINT_TESTCASE_START_MSG
while read line; do
arr=("${line}")
printf "%-20s %-100s %-7s\n" ${arr[0]} ${arr[1]} ${arr[2]}
done < ${run_benchmark_train_result_file}
MS_PRINT_TESTCASE_END_MSG
}
basepath=$(pwd)
echo ${basepath}
#set -e
epoch_num=1
train_models_path=""
train_io_path=""
threads=2
# Example:sh run_benchmark_arm.sh -r /home/temp_test -m /home/temp_test/models -d "8KE5T19620002408" -e arm_cpu
while getopts "r:m:d:e:" opt; do
while getopts "r:m:d:e:M:q:i:t:" opt; do
case ${opt} in
r)
release_path=${OPTARG}
@@ -1469,6 +1627,14 @@ while getopts "r:m:d:e:" opt; do
models_path=${OPTARG}
echo "models_path is ${OPTARG}"
;;
i)
train_io_path=${OPTARG}
echo "train_io_path is ${OPTARG}"
;;
M)
train_models_path=${OPTARG}
echo "train_models_path is ${OPTARG}"
;;
d)
device_id=${OPTARG}
echo "device_id is ${OPTARG}"
@@ -1477,6 +1643,14 @@ while getopts "r:m:d:e:" opt; do
backend=${OPTARG}
echo "backend is ${OPTARG}"
;;
t)
epoch_num=${OPTARG}
echo "train epoch num is ${epoch_num}"
;;
q)
threads=${OPTARG}
echo "threads=${threads}"
;;
?)
echo "unknown para"
exit 1;;
@@ -1486,7 +1660,7 @@ done
# mkdir train
x86_path=${release_path}/ubuntu_x86
file_name=$(ls ${x86_path}/*inference-linux-x64.tar.gz)
file_name=$(ls ${x86_path}/*linux-x64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
@@ -1515,9 +1689,31 @@ models_with_multiple_inputs_config=${basepath}/../config/models_with_multiple_inputs.cfg
models_for_process_only_config=${basepath}/../config/models_for_process_only.cfg
models_tf_weightquant_config=${basepath}/../config/models_tf_weightquant.cfg
models_codegen_config=${basepath}/../codegen/models_codegen.cfg
models_ms_train_config=${basepath}/../config/models_ms_train.cfg
ms_models_path=${basepath}/ms_models
ms_train_models_path=${basepath}/ms_train_models
rm -rf ${ms_train_models_path}
mkdir -p ${ms_train_models_path}
build_path=${basepath}/codegen_build
logs_path=${basepath}/logs_train
rm -rf ${logs_path}
mkdir -p ${logs_path}
run_benchmark_train_result_file=${logs_path}/run_benchmark_train_result.txt
echo ' ' > ${run_benchmark_train_result_file}
if [[ $train_models_path == "" ]]
then
echo "train_models_path is empty"
train_models_path="${models_path}/../../models_train"
fi
echo $train_models_path
if [[ $train_io_path == "" ]]
then
echo "train_io path is empty"
train_io_path=${train_models_path}/input_output
fi
echo $train_io_path
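# For illustration, a full invocation with the new train options could look like:
#   sh run_benchmark_arm.sh -r /home/temp_test -m /home/temp_test/models \
#      -M /home/temp_test/models_train -i /home/temp_test/models_train/input_output \
#      -d "8KE5T19620002408" -e arm64_cpu -t 1 -q 2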
# Write converter result to temp file
run_converter_log_file=${basepath}/run_converter_log.txt
@@ -1556,6 +1752,9 @@ echo 'run arm64_fp32 logs: ' > ${run_arm64_fp32_log_file}
run_arm64_fp32_codegen_log_file=${basepath}/run_arm64_fp32_codegen_log.txt
echo 'run arm64_codegen logs: ' > ${run_arm64_fp32_codegen_log_file}
run_arm64_train_fp32_log_file=${basepath}/run_arm64_train_fp32_log.txt
echo 'run arm64_train_fp32 logs: ' > ${run_arm64_train_fp32_log_file}
run_arm32_fp32_codegen_log_file=${basepath}/run_arm32_fp32_codegen_log.txt
echo 'run arm32_codegen logs: ' > ${run_arm32_fp32_codegen_log_file}
@@ -1573,9 +1772,13 @@ echo "Push files to the arm and run benchmark"
benchmark_test_path=${basepath}/benchmark_test
rm -rf ${benchmark_test_path}
mkdir -p ${benchmark_test_path}
benchmark_train_test_path=${basepath}/benchmark_train_test
rm -rf ${benchmark_train_test_path}
mkdir -p ${benchmark_train_test_path}
cp -a ${ms_models_path}/*.ms ${benchmark_test_path} || exit 1
# Copy models converted using old release of mslite converter for compatibility test
cp -a ${models_path}/compatibility_test/*.ms ${benchmark_test_path} || exit 1
cp -a ${ms_train_models_path}/*.ms ${benchmark_train_test_path} || exit 1
backend=${backend:-"all"}
isFailed=0
@@ -1583,7 +1786,7 @@ isFailed=0
if [[ $backend == "all" || $backend == "arm64_cpu" || $backend == "arm64_codegen" ]]; then
# Run on arm64
arm64_path=${release_path}/android_aarch64
file_name=$(ls ${arm64_path}/*inference-android-aarch64.tar.gz)
file_name=$(ls ${arm64_path}/*android-aarch64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
@@ -1596,7 +1799,7 @@ fi
if [[ $backend == "all" || $backend == "arm32_cpu" || $backend == "arm32_codegen" ]]; then
# Run on arm32
arm32_path=${release_path}/android_aarch32
file_name=$(ls ${arm32_path}/*inference-android-aarch32.tar.gz)
file_name=$(ls ${arm32_path}/*android-aarch32.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
@@ -1609,7 +1812,7 @@ fi
if [[ $backend == "all" || $backend == "arm32_cpu" || $backend == "arm32_fp16" ]]; then
# Run on armv82-a32-fp16
armv82_path=${release_path}/android_aarch32
file_name=$(ls ${armv82_path}/*inference-android-aarch32.tar.gz)
file_name=$(ls ${armv82_path}/*android-aarch32.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
@@ -1623,7 +1826,7 @@ if [[ $backend == "all" || $backend == "arm32_cpu" || $backend == "arm32_fp32" ]]; then
# Run on arm32
arm32_path=${release_path}/android_aarch32
# mv ${arm32_path}/*train-android-aarch32* ./train
file_name=$(ls ${arm32_path}/*inference-android-aarch32.tar.gz)
file_name=$(ls ${arm32_path}/*android-aarch32.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
@@ -1637,7 +1840,7 @@ if [[ $backend == "all" || $backend == "arm64_cpu" || $backend == "arm64_fp32" ]]; then
# Run on arm64
arm64_path=${release_path}/android_aarch64
# mv ${arm64_path}/*train-android-aarch64* ./train
file_name=$(ls ${arm64_path}/*inference-android-aarch64.tar.gz)
file_name=$(ls ${arm64_path}/*android-aarch64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
@@ -1651,7 +1854,7 @@ if [[ $backend == "all" || $backend == "arm64_cpu" || $backend == "arm64_fp16" ]]; then
# Run on arm64-fp16
arm64_path=${release_path}/android_aarch64
# mv ${arm64_path}/*train-android-aarch64* ./train
file_name=$(ls ${arm64_path}/*inference-android-aarch64.tar.gz)
file_name=$(ls ${arm64_path}/*android-aarch64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
@@ -1710,4 +1913,9 @@ Print_Benchmark_Result
if [[ $isFailed == 1 ]]; then
exit 1
fi
echo "Run Arm Train is ended"
Print_Benchmark_Train_Result
if [[ $isFailed == 1 ]]; then
exit 1
fi
exit 0

View File

@@ -4,8 +4,8 @@
function Run_Converter() {
# Unzip x86 runtime and converter
cd ${x86_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-inference-linux-x64/ || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-linux-x64/ || exit 1
cp tools/converter/converter/converter_lite ./ || exit 1
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./tools/converter/lib/:./tools/converter/third_party/glog/lib
@@ -114,19 +114,19 @@ function Run_Converter() {
# Run on gpu platform:
function Run_gpu() {
cd ${arm64_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-android-aarch64.tar.gz || exit 1
tar -zxf mindspore-lite-${version}-android-aarch64.tar.gz || exit 1
# If build with minddata, copy the minddata related libs
cd ${benchmark_test_path} || exit 1
if [ -f ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/lib/libminddata-lite.so ]; then
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
if [ -f ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libminddata-lite.so ]; then
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
fi
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
# adb push all needed files to the phone
adb -s ${device_id} push ${benchmark_test_path} /data/local/tmp/ > adb_push_log.txt
@@ -384,7 +384,7 @@ done
# mkdir train
x86_path=${release_path}/ubuntu_x86
file_name=$(ls ${x86_path}/*inference-linux-x64.tar.gz)
file_name=$(ls ${x86_path}/*linux-x64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
@@ -446,7 +446,7 @@ if [[ $backend == "all" || $backend == "gpu" ]]; then
# Run on gpu
arm64_path=${release_path}/android_aarch64
# mv ${arm64_path}/*train-android-aarch64* ./train
file_name=$(ls ${arm64_path}/*inference-android-aarch64.tar.gz)
file_name=$(ls ${arm64_path}/*android-aarch64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}

View File

@@ -4,8 +4,8 @@
function Run_Converter() {
# Unzip x86 runtime and converter
cd ${x86_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-inference-linux-x64/ || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-linux-x64/ || exit 1
cp tools/converter/converter/converter_lite ./ || exit 1
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./tools/converter/lib/:./tools/converter/third_party/glog/lib
@@ -59,19 +59,19 @@ function Run_Converter() {
# Run on npu platform:
function Run_npu() {
cd ${arm64_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-android-aarch64.tar.gz || exit 1
tar -zxf mindspore-lite-${version}-android-aarch64.tar.gz || exit 1
# If build with minddata, copy the minddata related libs
cd ${benchmark_test_path} || exit 1
if [ -f ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/minddata/lib/libminddata-lite.so ]; then
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/minddata/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
if [ -f ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/minddata/lib/libminddata-lite.so ]; then
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/minddata/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
fi
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai.so ${benchmark_test_path}/libhiai.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai_ir.so ${benchmark_test_path}/libhiai_ir.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/third_party/hiai_ddk/lib/libhiai_ir_build.so ${benchmark_test_path}/libhiai_ir_build.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/inference/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-inference-android-aarch64/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/runtime/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm64_path}/mindspore-lite-${version}-android-aarch64/tools/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
# adb push all needed files to the phone
adb -s ${device_id} push ${benchmark_test_path} /data/local/tmp/ > adb_push_log.txt
@@ -178,7 +178,7 @@ done
# mkdir train
x86_path=${release_path}/ubuntu_x86
file_name=$(ls ${x86_path}/*inference-linux-x64.tar.gz)
file_name=$(ls ${x86_path}/*linux-x64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
@@ -235,7 +235,7 @@ if [[ $backend == "all" || $backend == "npu" ]]; then
# Run on npu
arm64_path=${release_path}/android_aarch64
# mv ${arm64_path}/*train-android-aarch64* ./train
file_name=$(ls ${arm64_path}/*inference-android-aarch64.tar.gz)
file_name=$(ls ${arm64_path}/*android-aarch64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}

View File

@@ -1,11 +1,42 @@
#!/bin/bash
# Run Export on x86 platform and create output test files:
docker_image=mindspore_build:210301
function Run_Export(){
cd $models_path || exit 1
if [[ -z "${CLOUD_MODEL_ZOO}" ]]; then
echo "CLOUD_MODEL_ZOO is not defined - exiting export models"
exit 1
fi
# Export mindspore train models:
while read line; do
IFS=" " read -r -a line_array <<< ${line}
model_name=${line_array[0]}
if [[ $model_name == \#* ]]; then
continue
fi
echo ${model_name}'_train_export.py' >> "${export_log_file}"
echo 'exporting' ${model_name}
if [ -n "$docker_image" ]; then
echo 'docker run --user '"$(id -u):$(id -g)"' --env CLOUD_MODEL_ZOO=${CLOUD_MODEL_ZOO} -w $PWD --runtime=nvidia -v /home/$USER:/home/$USER -v /opt/share:/opt/share --privileged=true '${docker_image}' python '${models_path}'/'${model_name}'_train_export.py' >> "${export_log_file}"
docker run --user "$(id -u):$(id -g)" --env CLOUD_MODEL_ZOO=${CLOUD_MODEL_ZOO} -w $PWD --runtime=nvidia -v /home/$USER:/home/$USER -v /opt/share:/opt/share --privileged=true "${docker_image}" python ${models_path}'/'${model_name}_train_export.py "${epoch_num}"
else
echo 'CLOUD_MODEL_ZOO=${CLOUD_MODEL_ZOO} python '${models_path}'/'${model_name}'_train_export.py' >> "${export_log_file}"
CLOUD_MODEL_ZOO=${CLOUD_MODEL_ZOO} python ${models_path}'/'${model_name}_train_export.py "${epoch_num}"
fi
if [ $? = 0 ]; then
export_result='export mindspore '${model_name}'_train_export pass';echo ${export_result} >> ${export_result_file}
else
export_result='export mindspore '${model_name}'_train_export failed';echo ${export_result} >> ${export_result_file}
fi
done < ${models_ms_train_config}
}
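# Each non-comment line of ${models_ms_train_config} is parsed as:
#   <model_name> [weight_quant <accuracy_limit>]
# e.g. (hypothetical entries):
#   lenet
#   mobilenetv2 weight_quant 2.0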
# Run converter on x86 platform:
function Run_Converter() {
# Unzip x86 runtime and converter
cd ${x86_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-inference-linux-x64/ || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/mindspore-lite-${version}-linux-x64/ || exit 1
cp tools/converter/converter/converter_lite ./ || exit 1
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./tools/converter/lib/:./tools/converter/third_party/glog/lib
@ -95,7 +126,7 @@ function Run_Converter() {
fi
done < ${models_mindspore_config}
# Convert mindspore train models:
# Convert mindspore quant train models:
while read line; do
model_name=${line}
if [[ $model_name == \#* ]]; then
@ -111,6 +142,32 @@ function Run_Converter() {
fi
done < ${models_mindspore_train_config}
rm -rf ${ms_train_models_path}
mkdir -p ${ms_train_models_path}
# Convert mindspore train models:
while read line; do
LFS=" " read -r -a line_array <<< ${line}
WEIGHT_QUANT=""
model_prefix=${line_array[0]}'_train'
model_name=${line_array[0]}'_train'
if [[ $model_name == \#* ]]; then
continue
fi
if [[ "${line_array[1]}" == "weight_quant" ]]; then
WEIGHT_QUANT="--quantType=WeightQuant --bitNum=8 --quantWeightSize=0 --quantWeightChannel=0"
model_name=${line_array[0]}'_train_quant'
fi
echo ${model_name} >> "${run_converter_log_file}"
echo './converter_lite --fmk=MINDIR --modelFile='${train_models_path}'/'${model_prefix}'.mindir --outputFile='${ms_train_models_path}'/'${model_name}' --trainModel=true' ${WEIGHT_QUANT} >> "${run_converter_log_file}"
./converter_lite --fmk=MINDIR --modelFile=${train_models_path}/${model_prefix}.mindir --outputFile=${ms_train_models_path}/${model_name} --trainModel=true ${WEIGHT_QUANT}
if [ $? = 0 ]; then
converter_result='converter mindspore_train '${model_name}' pass';echo ${converter_result} >> ${run_converter_result_file}
else
converter_result='converter mindspore_train '${model_name}' failed';echo ${converter_result} >> ${run_converter_result_file};return 1
fi
done < ${models_ms_train_config}
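# A resulting conversion call looks like (model name illustrative):
#   ./converter_lite --fmk=MINDIR --modelFile=${train_models_path}/lenet_train.mindir \
#     --outputFile=${ms_train_models_path}/lenet_train --trainModel=true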
# Convert TFLite PostTraining models:
while read line; do
posttraining_line_info=${line}
@ -344,9 +401,9 @@ function Run_Converter() {
# Run on x86 platform:
function Run_x86() {
echo 'cd '${x86_path}'/mindspore-lite-'${version}'-inference-linux-x64' >> "${run_x86_log_file}"
cd ${x86_path}/mindspore-lite-${version}-inference-linux-x64 || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./inference/lib
echo 'cd '${x86_path}'/mindspore-lite-'${version}'-linux-x64' >> "${run_x86_log_file}"
cd ${x86_path}/mindspore-lite-${version}-linux-x64 || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib:./runtime/third_party/libjpeg-turbo/lib
cp tools/benchmark/benchmark ./ || exit 1
# Run tf converted models:
@ -666,14 +723,48 @@ function Run_x86() {
run_result='x86: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}; return 1
fi
done < ${models_for_process_only_config}
# Run mindspore converted train models:
fail=0
while read line; do
LFS=" " read -r -a line_array <<< ${line}
model_prefix=${line_array[0]}
model_name=${line_array[0]}'_train'
accuracy_limit=0.5
if [[ $model_name == \#* ]]; then
continue
fi
if [[ "${line_array[1]}" == "weight_quant" ]]; then
model_name=${line_array[0]}'_train_quant'
accuracy_limit=${line_array[2]}
fi
export_file="${ms_train_models_path}/${model_name}_tod"
inference_file="${ms_train_models_path}/${model_name}_infer"
rm -f ${inference_file}*
rm -f ${export_file}*
echo ${model_name} >> "${run_x86_log_file}"
${run_valgrind}./tools/benchmark_train/benchmark_train \
--modelFile=${ms_train_models_path}/${model_name}.ms \
--inDataFile=${train_io_path}/${model_prefix}_input \
--expectedDataFile=${train_io_path}/${model_prefix}_output --epochs=${epoch_num} --numThreads=${threads} \
--accuracyThreshold=${accuracy_limit} --inferenceFile=${inference_file} \
--exportFile=${export_file} >> "${run_x86_log_file}"
if [ $? = 0 ]; then
run_result='x86_train: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_train_result_file}
else
run_result='x86_train: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_train_result_file}
fail=1
fi
done < ${models_ms_train_config}
return ${fail}
}
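# Judging by the flags, benchmark_train trains each model for --epochs on
# --inDataFile, compares against --expectedDataFile within --accuracyThreshold,
# and round-trips the trained net through --exportFile/--inferenceFile; any
# failure flips ${fail}, which Run_x86 returns to its caller.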
# Run on x86 sse platform:
function Run_x86_sse() {
cd ${x86_path}/sse || exit 1
tar -zxf mindspore-lite-${version}-inference-linux-x64.tar.gz || exit 1
cd ${x86_path}/sse/mindspore-lite-${version}-inference-linux-x64 || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./inference/lib
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/sse/mindspore-lite-${version}-linux-x64 || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib
cp tools/benchmark/benchmark ./ || exit 1
# Run tflite converted models:
@ -943,9 +1034,9 @@ function Run_x86_sse() {
# Run on x86 avx platform:
function Run_x86_avx() {
cd ${x86_path}/avx || exit 1
tar -zxf mindspore-lite-${version}-inference-linux-x64.tar.gz || exit 1
cd ${x86_path}/avx/mindspore-lite-${version}-inference-linux-x64 || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./inference/lib
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/avx/mindspore-lite-${version}-linux-x64 || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib
cp tools/benchmark/benchmark ./ || exit 1
# Run tflite converted models:
@ -1215,11 +1306,11 @@ function Run_x86_avx() {
# Run on x86 java platform:
function Run_x86_java() {
cd ${x86_java_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-linux-x64.tar.gz || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${x86_java_path}/mindspore-lite-${version}-inference-linux-x64/inference/lib
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${x86_java_path}/mindspore-lite-${version}-linux-x64/runtime/lib
# compile benchmark
echo "javac -cp ${x86_java_path}/mindspore-lite-${version}-inference-linux-x64/inference/lib/mindspore-lite-java.jar ${basepath}/java/src/main/java/Benchmark.java -d ."
javac -cp ${x86_java_path}/mindspore-lite-${version}-inference-linux-x64/inference/lib/mindspore-lite-java.jar ${basepath}/java/src/main/java/Benchmark.java -d .
echo "javac -cp ${x86_java_path}/mindspore-lite-${version}-linux-x64/runtime/lib/mindspore-lite-java.jar ${basepath}/java/src/main/java/Benchmark.java -d ."
javac -cp ${x86_java_path}/mindspore-lite-${version}-linux-x64/runtime/lib/mindspore-lite-java.jar ${basepath}/java/src/main/java/Benchmark.java -d .
count=0
# Run tflite converted models:
@ -1234,8 +1325,8 @@ function Run_x86_java() {
continue
fi
echo ${model_name} >> "${run_x86_java_log_file}"
echo "java -classpath .:${x86_java_path}/mindspore-lite-${version}-inference-linux-x64/inference/lib/mindspore-lite-java.jar Benchmark ${ms_models_path}/${model_name}.ms '${models_path}'/input_output/input/${model_name}.ms.bin '${models_path}'/input_output/output/${model_name}.ms.out 1" >> "${run_x86_java_log_file}"
java -classpath .:${x86_java_path}/mindspore-lite-${version}-inference-linux-x64/inference/lib/mindspore-lite-java.jar Benchmark ${ms_models_path}/${model_name}.ms ${models_path}/input_output/input/${model_name}.ms.bin ${models_path}/input_output/output/${model_name}.ms.out 1
echo "java -classpath .:${x86_java_path}/mindspore-lite-${version}-linux-x64/runtime/lib/mindspore-lite-java.jar Benchmark ${ms_models_path}/${model_name}.ms '${models_path}'/input_output/input/${model_name}.ms.bin '${models_path}'/input_output/output/${model_name}.ms.out 1" >> "${run_x86_java_log_file}"
java -classpath .:${x86_java_path}/mindspore-lite-${version}-linux-x64/runtime/lib/mindspore-lite-java.jar Benchmark ${ms_models_path}/${model_name}.ms ${models_path}/input_output/input/${model_name}.ms.bin ${models_path}/input_output/output/${model_name}.ms.out 1
if [ $? = 0 ]; then
run_result='x86_java: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
else
@ -1246,7 +1337,7 @@ function Run_x86_java() {
# Run on x86 codegen benchmark
function Run_x86_codegen() {
local CODEGEN_PATH=${x86_path}/mindspore-lite-${version}-inference-linux-x64/tools/codegen
local CODEGEN_PATH=${x86_path}/mindspore-lite-${version}-linux-x64/tools/codegen
rm -rf ${build_path}
mkdir -p ${build_path}
@ -1260,7 +1351,7 @@ function Run_x86_codegen() {
${CODEGEN_PATH}/codegen --codePath=${build_path} --modelPath=${ms_models_path}/${model_name}.ms >> ${run_x86_codegen_log_file}
# 1. build benchmark
mkdir -p ${build_path}/${model_name}/build && cd ${build_path}/${model_name}/build || exit 1
cmake -DPKG_PATH=${x86_path}/mindspore-lite-${version}-inference-linux-x64 ${build_path}/${model_name} >> ${run_x86_codegen_log_file}
cmake -DPKG_PATH=${x86_path}/mindspore-lite-${version}-linux-x64 ${build_path}/${model_name} >> ${run_x86_codegen_log_file}
make >> ${run_x86_codegen_log_file}
# 2. run benchmark
echo "net file: ${build_path}/${model_name}/src/net.bin" >> ${run_x86_codegen_log_file}
@ -1278,7 +1369,7 @@ function Run_x86_codegen() {
# Run on x86 codegen benchmark parallel
function Run_x86_codegen_parallel() {
local CODEGEN_PATH=${x86_path}/mindspore-lite-${version}-inference-linux-x64/tools/codegen
local CODEGEN_PATH=${x86_path}/mindspore-lite-${version}-linux-x64/tools/codegen
rm -rf ${build_parallal_path}
mkdir -p ${build_parallal_path}
@ -1292,7 +1383,7 @@ function Run_x86_codegen_parallel() {
${CODEGEN_PATH}/codegen --codePath=${build_parallal_path} --modelPath=${ms_models_path}/${model_name}.ms --supportParallel=true >> ${run_x86_codegen_parallel_log_file}
# 1. build benchmark
mkdir -p ${build_parallal_path}/${model_name}/build && cd ${build_parallal_path}/${model_name}/build || exit 1
cmake -DPKG_PATH=${x86_path}/mindspore-lite-${version}-inference-linux-x64 ${build_parallal_path}/${model_name} >> ${run_x86_codegen_parallel_log_file}
cmake -DPKG_PATH=${x86_path}/mindspore-lite-${version}-linux-x64 ${build_parallal_path}/${model_name} >> ${run_x86_codegen_parallel_log_file}
make >> ${run_x86_codegen_parallel_log_file}
# 2. run benchmark
echo "net file: ${build_parallal_path}/${model_name}/src/net.bin" >> ${run_x86_codegen_parallel_log_file}
@ -1339,12 +1430,46 @@ function Print_Benchmark_Result() {
MS_PRINT_TESTCASE_END_MSG
}
function Print_Benchmark_Train_Result() {
MS_PRINT_TESTCASE_START_MSG
while read line; do
arr=("${line}")
printf "%-20s %-100s %-7s\n" ${arr[0]} ${arr[1]} ${arr[2]}
done < ${run_benchmark_train_result_file}
MS_PRINT_TESTCASE_END_MSG
}
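# Result lines carry three space-separated fields, e.g. "x86_train: lenet_train pass",
# which the three-column printf above relies on.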
basepath=$(pwd)
echo ${basepath}
#set -e
logs_path=${basepath}/logs_train
rm -rf ${logs_path}
mkdir -p ${logs_path}
# Export model if enabled
if [[ $enable_export == 1 ]]; then
echo "Start Exporting models ..."
# Write export result to temp file
export_log_file=${logs_path}/export_log.txt
echo ' ' > ${export_log_file}
export_result_file=${logs_path}/export_result.txt
echo ' ' > ${export_result_file}
# Run export
Run_Export
Print_Result ${export_result_file}
fi
# Write benchmark_train result to temp file
run_benchmark_train_result_file=${logs_path}/run_benchmark_train_result.txt
echo ' ' > ${run_benchmark_train_result_file}
epoch_num=1
threads=2
train_models_path=""
train_io_path=""
# Example: sh run_benchmark_x86.sh -r /home/temp_test -m /home/temp_test/models -e arm_cpu
while getopts "r:m:e:" opt; do
while getopts "r:m:M:e:i:v:p:t:" opt; do
case ${opt} in
r)
release_path=${OPTARG}
@ -1354,10 +1479,31 @@ while getopts "r:m:e:" opt; do
models_path=${OPTARG}
echo "models_path is ${OPTARG}"
;;
M)
train_models_path=${OPTARG}
echo "train_models_path is ${models_path}"
;;
e)
backend=${OPTARG}
echo "backend is ${OPTARG}"
;;
i)
train_io_path=${OPTARG}
echo "train_io_path is ${OPTARG}"
;;
v)
run_valgrind="valgrind --log-file=valgrind.log "
echo "Run x86 with valgrind"
;;
t)
epoch_num=${OPTARG}
echo "train epoch num is ${epoch_num}"
;;
p)
enable_export=1
docker_image=${OPTARG}
echo "enable_export = 1, docker_image = ${OPTARG}"
;;
?)
echo "unknown para"
exit 1;;
@ -1367,7 +1513,7 @@ done
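# Example invocation with the new train options (paths hypothetical; -v expects a
# placeholder argument because of the "v:" getopts spec):
#   sh run_benchmark_x86.sh -r /home/temp_test -m /home/temp_test/models \
#     -M /home/temp_test/models_train -i /home/temp_test/models_train/input_output \
#     -e x86 -t 1 -p mindspore_build:210301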
# mkdir train
x86_path=${release_path}/ubuntu_x86
file_name=$(ls ${x86_path}/*inference-linux-x64.tar.gz)
file_name=$(ls ${x86_path}/*linux-x64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
@ -1390,10 +1536,26 @@ models_for_process_only_config=${basepath}/../config/models_for_process_only.cfg
models_tf_weightquant_config=${basepath}/../config/models_tf_weightquant.cfg
models_codegen_config=${basepath}/../codegen/models_codegen.cfg
models_codegen_parallel_config=${basepath}/../codegen/models_codegen_parallel.cfg
models_ms_train_config=${basepath}/../config/models_ms_train.cfg
ms_models_path=${basepath}/ms_models
ms_train_models_path=${basepath}/ms_train_models
rm -rf ${ms_train_models_path}
mkdir -p ${ms_train_models_path}
build_path=${basepath}/codegen_build
build_parallal_path=${basepath}/codegen_parallel_build
if [[ $train_models_path == "" ]]
then
echo "train_io path is empty"
train_models_path="${models_path}/../../models_train"
fi
echo $train_models_path
if [[ $train_io_path == "" ]]
then
echo "train_io path is empty"
train_io_path=${train_models_path}/input_output
fi
echo $train_io_path
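# With the defaults above, the expected layout is roughly:
#   ${models_path}/../../models_train/
#     <model>_train.mindir          # consumed by the converter loop
#     input_output/<model>_input    # consumed by benchmark_train
#     input_output/<model>_output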
# Write converter result to temp file
run_converter_log_file=${basepath}/run_converter_log.txt
@ -1564,4 +1726,9 @@ Print_Benchmark_Result
if [[ $isFailed == 1 ]]; then
exit 1
fi
echo "Run x86 train end"
Print_Benchmark_Train_Result
if [[ $isFailed == 1 ]]; then
exit 1
fi
exit 0

View File

@ -24,16 +24,16 @@ function Print_Cropper_Result() {
function Run_cropper() {
cd ${arm64_path} || exit 1
tar -zxf mindspore-lite-${version}-inference-android-aarch64.tar.gz || exit 1
cd mindspore-lite-${version}-inference-android-aarch64 || exit 1
cp -a ./inference/third_party/hiai_ddk/lib/libhiai.so "${cropper_test_path}"/libhiai.so || exit 1
cp -a ./inference/third_party/hiai_ddk/lib/libhiai_ir.so "${cropper_test_path}"/libhiai_ir.so || exit 1
cp -a ./inference/third_party/hiai_ddk/lib/libhiai_ir_build.so "${cropper_test_path}"/libhiai_ir_build.so || exit 1
tar -zxf mindspore-lite-${version}-android-aarch64.tar.gz || exit 1
cd mindspore-lite-${version}-android-aarch64 || exit 1
cp -a ./runtime/third_party/hiai_ddk/lib/libhiai.so "${cropper_test_path}"/libhiai.so || exit 1
cp -a ./runtime/third_party/hiai_ddk/lib/libhiai_ir.so "${cropper_test_path}"/libhiai_ir.so || exit 1
cp -a ./runtime/third_party/hiai_ddk/lib/libhiai_ir_build.so "${cropper_test_path}"/libhiai_ir_build.so || exit 1
cp -a ./inference/lib/libmindspore-lite.a "${cropper_test_path}"/libmindspore-lite.a || exit 1
cp -a ./runtime/lib/libmindspore-lite.a "${cropper_test_path}"/libmindspore-lite.a || exit 1
cp -a ./tools/benchmark/benchmark "${cropper_test_path}"/benchmark || exit 1
cp -r "${x86_path}"/mindspore-lite-${version}-inference-linux-x64/tools/cropper/ "${cropper_test_path}" || exit 1
cp -r "${x86_path}"/mindspore-lite-${version}-linux-x64/tools/cropper/ "${cropper_test_path}" || exit 1
cd "${cropper_test_path}" || exit 1
echo "${cropper_test_path}"
@ -137,7 +137,7 @@ echo ' ' > "${run_converter_log_file}"
run_converter_result_file="${basepath}"/run_converter_result.txt
echo ' ' > "${run_converter_result_file}"
file_name=$(ls "${x86_path}"/*inference-linux-x64.tar.gz)
file_name=$(ls "${x86_path}"/*linux-x64.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
ms_models_path=${basepath}/ms_models

View File

@ -70,7 +70,7 @@ for /f "tokens=1-2 delims= " %%i in (%MODEL_CONFIG%) do (
)
echo "Run converted models"
copy %DST_PACKAGE_PATH%\inference\lib\* %DST_PACKAGE_PATH%\tools\benchmark\
copy %DST_PACKAGE_PATH%\runtime\lib\* %DST_PACKAGE_PATH%\tools\benchmark\
cd /d %DST_PACKAGE_PATH%\tools\benchmark\
SET INPUT_BASE=%MODEL_PATH%/input_output/input

View File

@ -16,112 +16,36 @@
set -e
function verify_every_file() {
for full_file in "$1"/*
do
if [ -d ${full_file} ]
then
verify_every_file ${full_file} $2
else
echo "check: ${full_file}"
exist_and_equal="false"
src_sha256=`sha256sum ${full_file} | cut -d" " -f1`
file_name=$(basename ${full_file})
find_result=`find $2 -name ${file_name} -type f`
for same_name_file in ${find_result}
do
dst_sha256=`sha256sum ${same_name_file} | cut -d" " -f1`
if [ ${src_sha256} == ${dst_sha256} ]
then
echo " dst: ${same_name_file}"
exist_and_equal="true"
fi
done
if [ ${exist_and_equal} == "false" ]
then
echo " check failed!"
exit 1
fi
fi
done
}
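# Usage: verify_every_file <src_dir> <dst_dir>
# Recursively requires every file under <src_dir> to have a same-named file
# somewhere under <dst_dir> with an identical sha256; exits 1 on the first miss.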
function android_release_package()
{
arch=$1
device=$2
src_inference_pkg_name="mindspore-lite-${version}-inference-android-${arch}"
src_train_pkg_name="mindspore-lite-${version}-train-android-${arch}"
dst_pkg_name="mindspore-lite-${version}-android-${arch}"
rm -rf ${src_inference_pkg_name}
rm -rf ${src_train_pkg_name}
rm -rf ${dst_pkg_name}
tar -xzf ${input_path}/android_${arch}/${device}/${src_inference_pkg_name}.tar.gz
tar -xzf ${input_path}/android_${arch}/${device}/${src_train_pkg_name}.tar.gz
cp -r ${src_train_pkg_name}/tools/benchmark_train/ ${src_inference_pkg_name}/tools/
cp -r ${src_train_pkg_name}/train/ ${src_inference_pkg_name}/
mkdir -p ${output_path}/release/android/${device}/
mv ${src_inference_pkg_name} ${dst_pkg_name}
# Copy java runtime to Android package
cp ${input_path}/aar/avx/mindspore-lite-*maven*.zip ${dst_pkg_name}
tar -czf ${output_path}/release/android/${device}/${dst_pkg_name}.tar.gz ${dst_pkg_name}
mv ${input_path}/android_${arch}/${device}/${dst_pkg_name}.tar.gz ${output_path}/release/android/${device}/${dst_pkg_name}.tar.gz
cd ${output_path}/release/android/${device}/
sha256sum ${dst_pkg_name}.tar.gz > ${dst_pkg_name}.tar.gz.sha256
cd -
verify_every_file ${src_train_pkg_name}/tools/benchmark_train/ ${dst_pkg_name}
verify_every_file ${src_train_pkg_name}/train/ ${dst_pkg_name}
rm -rf ${src_train_pkg_name}
rm -rf ${dst_pkg_name}
}
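# With the unified package naming the per-device tarball is already complete, so
# releasing reduces to relocating it and emitting the matching .sha256 file.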
function linux_release_package()
{
src_inference_pkg_name="mindspore-lite-${version}-inference-linux-x64"
src_train_pkg_name="mindspore-lite-${version}-train-linux-x64"
dst_pkg_name="mindspore-lite-${version}-linux-x64"
rm -rf ${src_inference_pkg_name}
rm -rf ${src_train_pkg_name}
rm -rf ${dst_pkg_name}
tar -xzf ${input_path}/ubuntu_x86/avx/${src_inference_pkg_name}.tar.gz
tar -xzf ${input_path}/ubuntu_x86/${src_train_pkg_name}.tar.gz
cp -r ${src_train_pkg_name}/tools/benchmark_train/ ${src_inference_pkg_name}/tools/
cp -r ${src_train_pkg_name}/train/ ${src_inference_pkg_name}/
mkdir -p ${output_path}/release/linux/
mv ${src_inference_pkg_name} ${dst_pkg_name}
tar -czf ${output_path}/release/linux/${dst_pkg_name}.tar.gz ${dst_pkg_name}
mv ${input_path}/ubuntu_x86/${dst_pkg_name}.tar.gz ${output_path}/release/linux/
cd ${output_path}/release/linux/
sha256sum ${dst_pkg_name}.tar.gz > ${dst_pkg_name}.tar.gz.sha256
cd -
verify_every_file ${src_train_pkg_name}/tools/benchmark_train/ ${dst_pkg_name}
verify_every_file ${src_train_pkg_name}/train/ ${dst_pkg_name}
rm -rf ${src_train_pkg_name}
rm -rf ${dst_pkg_name}
}
function windows_release_package()
{
src_inference_pkg_name="mindspore-lite-${version}-inference-win-x64"
dst_pkg_name="mindspore-lite-${version}-win-x64"
pkg_name="mindspore-lite-${version}-win-x64"
rm -rf ${src_inference_pkg_name}
rm -rf ${dst_pkg_name}
unzip ${input_path}/windows_x64/avx/${src_inference_pkg_name}.zip
mv ${src_inference_pkg_name} ${dst_pkg_name}
mkdir -p ${output_path}/release/windows/
zip -r ${output_path}/release/windows/${dst_pkg_name}.zip ${dst_pkg_name}
rm -rf ${pkg_name}
mv ${input_path}/windows_x64/avx/${pkg_name}.zip ${output_path}/release/windows/${dst_pkg_name}.zip
cd ${output_path}/release/windows/
sha256sum ${dst_pkg_name}.zip > ${dst_pkg_name}.zip.sha256
cd -
rm -rf ${dst_pkg_name}
}
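# Windows follows the same pattern, with zip/.sha256 in place of tar.gz.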
echo "============================== begin =============================="
@ -129,7 +53,7 @@ echo "Usage: bash lite_release_package.sh input_path output_path"
input_path=$1
output_path=$2
version=`ls ${input_path}/android_aarch64/mindspore-lite-*-inference-*.tar.gz | awk -F'/' '{print $NF}' | cut -d"-" -f3`
version=`ls ${input_path}/android_aarch64/mindspore-lite-*-*.tar.gz | awk -F'/' '{print $NF}' | cut -d"-" -f3`
android_release_package aarch32
android_release_package aarch64