!47004 MSLite, update cloud inference example

Merge pull request !47004 from 徐永飞/lite_example
i-robot 2022-12-21 02:41:59 +00:00 committed by Gitee
commit b31639b7ed
11 changed files with 61 additions and 129 deletions

View File

@@ -1,5 +1,5 @@
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

View File

@@ -0,0 +1,21 @@
cmake_minimum_required(VERSION 3.14)
project(QuickStartCpp)
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.3.0)
message(FATAL_ERROR "GCC version ${CMAKE_CXX_COMPILER_VERSION} must not be less than 7.3.0")
endif()
if(DEFINED ENV{LITE_HOME})
set(LITE_HOME $ENV{LITE_HOME})
endif()
# Add directory to include search path
include_directories(${LITE_HOME}/runtime)
# Add directory to linker search path
link_directories(${LITE_HOME}/runtime/lib)
link_directories(${LITE_HOME}/tools/converter/lib)
file(GLOB_RECURSE QUICK_START_CXX ${CMAKE_CURRENT_SOURCE_DIR}/*.cc)
add_executable(mindspore_quick_start_cpp ${QUICK_START_CXX})
target_link_libraries(mindspore_quick_start_cpp mindspore-lite pthread dl)

View File

@@ -19,24 +19,22 @@ get_version() {
VERSION_STR=$(cat ${BASEPATH}/../../../../version.txt)
}
get_version
MODEL_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/quick_start/mobilenetv2.ms"
MODEL_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/quick_start/mobilenetv2.mindir"
MINDSPORE_FILE_NAME="mindspore-lite-${VERSION_STR}-linux-x64"
MINDSPORE_FILE="${MINDSPORE_FILE_NAME}.tar.gz"
MINDSPORE_LITE_DOWNLOAD_URL="https://ms-release.obs.cn-north-4.myhuaweicloud.com/${VERSION_STR}.B310/MindSpore/lite/release/linux/x86_64/server/${MINDSPORE_FILE}"
MINDSPORE_LITE_DOWNLOAD_URL="https://ms-release.obs.cn-north-4.myhuaweicloud.com/${VERSION_STR}/MindSpore/lite/release/linux/centos_x86/cloud_fusion/${MINDSPORE_FILE}"
mkdir -p build || exit
mkdir -p lib || exit
mkdir -p model || exit
if [ ! -e ${BASEPATH}/model/mobilenetv2.ms ]; then
wget -c -O ${BASEPATH}/model/mobilenetv2.ms --no-check-certificate ${MODEL_DOWNLOAD_URL}
mkdir -p model
if [ ! -e ${BASEPATH}/model/mobilenetv2.mindir ]; then
wget -c -O ${BASEPATH}/model/mobilenetv2.mindir --no-check-certificate ${MODEL_DOWNLOAD_URL}
fi
if [ ! -e ${BASEPATH}/build/${MINDSPORE_FILE} ]; then
wget -c -O ${BASEPATH}/build/${MINDSPORE_FILE} --no-check-certificate ${MINDSPORE_LITE_DOWNLOAD_URL}
if [ ! -e ${BASEPATH}/${MINDSPORE_FILE} ]; then
wget -c -O ${BASEPATH}/${MINDSPORE_FILE} --no-check-certificate ${MINDSPORE_LITE_DOWNLOAD_URL}
fi
tar xzvf ${BASEPATH}/build/${MINDSPORE_FILE} -C ${BASEPATH}/build/
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/lib/libmindspore-lite.a ${BASEPATH}/lib
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/third_party/glog/* ${BASEPATH}/lib
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/include ${BASEPATH}/
cd ${BASEPATH}/build || exit
cmake ${BASEPATH}
tar xzvf ${BASEPATH}/${MINDSPORE_FILE}
export LITE_HOME=${BASEPATH}/${MINDSPORE_FILE_NAME}
mkdir -p build
cd build || exit
cmake ..
make

View File

@@ -30,61 +30,6 @@ constexpr int kNumWorkers = 2;
constexpr int kElementsNum = 1001;
constexpr int64_t MAX_MALLOC_SIZE = static_cast<size_t>(2000) * 1024 * 1024;
} // namespace
std::string RealPath(const char *path) {
const size_t max = 4096;
if (path == nullptr) {
std::cerr << "path is nullptr" << std::endl;
return "";
}
if ((strlen(path)) >= max) {
std::cerr << "path is too long" << std::endl;
return "";
}
auto resolved_path = std::make_unique<char[]>(max);
if (resolved_path == nullptr) {
std::cerr << "new resolved_path failed" << std::endl;
return "";
}
#ifdef _WIN32
char *real_path = _fullpath(resolved_path.get(), path, 1024);
#else
char *real_path = realpath(path, resolved_path.get());
#endif
if (real_path == nullptr || strlen(real_path) == 0) {
std::cerr << "file path is not valid : " << path << std::endl;
return "";
}
std::string res = resolved_path.get();
return res;
}
char *ReadFile(const char *file, size_t *size) {
if (file == nullptr) {
std::cerr << "file is nullptr." << std::endl;
return nullptr;
}
std::ifstream ifs(file, std::ifstream::in | std::ifstream::binary);
if (!ifs.good()) {
std::cerr << "file: " << file << " is not exist." << std::endl;
return nullptr;
}
if (!ifs.is_open()) {
std::cerr << "file: " << file << " open failed." << std::endl;
return nullptr;
}
ifs.seekg(0, std::ios::end);
*size = ifs.tellg();
std::unique_ptr<char[]> buf(new (std::nothrow) char[*size]);
if (buf == nullptr) {
std::cerr << "malloc buf failed, file: " << file << std::endl;
ifs.close();
return nullptr;
}
ifs.seekg(0, std::ios::beg);
ifs.read(buf.get(), *size);
ifs.close();
return buf.release();
}
template <typename T, typename Distribution>
void GenerateRandomData(int size, void *data, Distribution distribution) {
@@ -120,9 +65,9 @@ int QuickStart(int argc, const char **argv) {
std::cerr << "Model file must be provided.\n";
return -1;
}
auto model_path = RealPath(argv[1]);
std::string model_path = argv[1];
if (model_path.empty()) {
std::cerr << "Model path " << argv[1] << " is invalid.";
std::cerr << "Model path " << model_path << " is invalid.";
return -1;
}
@@ -202,7 +147,8 @@ int QuickStart(int argc, const char **argv) {
<< " tensor elements num is:" << tensor.ElementNum() << std::endl;
auto out_data = reinterpret_cast<const float *>(tensor.Data().get());
std::cout << "output data is:";
for (int i = 0; i < tensor.ElementNum() && i <= 50; i++) {
constexpr int print_max = 50;
for (int i = 0; i < tensor.ElementNum() && i <= print_max; i++) {
std::cout << out_data[i] << " ";
}
std::cout << std::endl;
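
Note for readers: the hunks above drop the RealPath/ReadFile helpers and pass the MindIR model path straight to the runtime. As a point of reference, a minimal single-model flow with the cloud inference C++ API might look like the sketch below. This is not the PR's main.cc (which also defines kNumWorkers and prints outputs as shown above); the CPU-only context, float32 inputs, and error handling here are illustrative assumptions.

// Minimal sketch: load a .mindir model and run one inference with the
// MindSpore Lite cloud inference C++ API (headers from ${LITE_HOME}/runtime).
#include <iostream>
#include <memory>
#include <random>
#include <string>
#include <vector>
#include "include/api/context.h"
#include "include/api/model.h"
#include "include/api/status.h"
#include "include/api/types.h"

int main(int argc, const char **argv) {
  if (argc < 2) {
    std::cerr << "Usage: mindspore_quick_start_cpp <model.mindir>" << std::endl;
    return -1;
  }
  std::string model_path = argv[1];  // used as-is, matching the simplified example

  // CPU-only context; other DeviceInfo types could be appended instead.
  auto context = std::make_shared<mindspore::Context>();
  context->MutableDeviceInfo().push_back(std::make_shared<mindspore::CPUDeviceInfo>());

  // Build the model directly from the MindIR file.
  mindspore::Model model;
  if (model.Build(model_path, mindspore::kMindIR, context) != mindspore::kSuccess) {
    std::cerr << "Build model failed: " << model_path << std::endl;
    return -1;
  }

  // Fill each input with random floats (assumes float32 inputs, as mobilenetv2 has).
  std::default_random_engine engine;
  std::uniform_real_distribution<float> dist(0.0f, 1.0f);
  auto inputs = model.GetInputs();
  for (auto &tensor : inputs) {
    auto *data = reinterpret_cast<float *>(tensor.MutableData());
    for (int64_t i = 0; i < tensor.ElementNum(); ++i) {
      data[i] = dist(engine);
    }
  }

  // Run inference and print the first values of the first output tensor.
  std::vector<mindspore::MSTensor> outputs;
  if (model.Predict(inputs, &outputs) != mindspore::kSuccess) {
    std::cerr << "Predict failed." << std::endl;
    return -1;
  }
  constexpr int print_max = 50;
  auto *out_data = reinterpret_cast<const float *>(outputs.front().Data().get());
  for (int i = 0; i < outputs.front().ElementNum() && i <= print_max; ++i) {
    std::cout << out_data[i] << " ";
  }
  std::cout << std::endl;
  return 0;
}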

View File

@@ -16,25 +16,30 @@
BASEPATH=$(cd "$(dirname $0)" || exit; pwd)
get_version() {
VERSION_STR=$(cat ${BASEPATH}/../../../../version.txt)
VERSION_STR=$(cat ${BASEPATH}/../../../../../version.txt)
}
get_version
MODEL_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/quick_start/mobilenetv2.ms"
MODEL_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/quick_start/mobilenetv2.mindir"
MINDSPORE_FILE_NAME="mindspore-lite-${VERSION_STR}-linux-x64"
MINDSPORE_FILE="${MINDSPORE_FILE_NAME}.tar.gz"
MINDSPORE_LITE_DOWNLOAD_URL="https://ms-release.obs.cn-north-4.myhuaweicloud.com/${VERSION_STR}.B310/MindSpore/lite/release/linux/x86_64/server/${MINDSPORE_FILE}"
MINDSPORE_LITE_DOWNLOAD_URL="https://ms-release.obs.cn-north-4.myhuaweicloud.com/${VERSION_STR}/MindSpore/lite/release/linux/x86_64/${MINDSPORE_FILE}"
mkdir -p build
mkdir -p lib
mkdir -p lib/runtime
mkdir -p lib/tools/converter
mkdir -p model
if [ ! -e ${BASEPATH}/model/mobilenetv2.ms ]; then
wget -c -O ${BASEPATH}/model/mobilenetv2.ms --no-check-certificate ${MODEL_DOWNLOAD_URL}
if [ ! -e ${BASEPATH}/model/mobilenetv2.mindir ]; then
wget -c -O ${BASEPATH}/model/mobilenetv2.mindir --no-check-certificate ${MODEL_DOWNLOAD_URL}
fi
if [ ! -e ${BASEPATH}/build/${MINDSPORE_FILE} ]; then
wget -c -O ${BASEPATH}/build/${MINDSPORE_FILE} --no-check-certificate ${MINDSPORE_LITE_DOWNLOAD_URL}
fi
tar xzvf ${BASEPATH}/build/${MINDSPORE_FILE} -C ${BASEPATH}/build/
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/lib/* ${BASEPATH}/lib
# copy shared libraries to lib and add to LD_LIBRARY_PATH
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/runtime/lib/ ${BASEPATH}/lib/runtime/
cp -r ${BASEPATH}/build/${MINDSPORE_FILE_NAME}/tools/converter/lib/ ${BASEPATH}/lib/tools/converter/
export LD_LIBRARY_PATH=${BASEPATH}/lib/runtime/lib:${BASEPATH}/lib/tools/converter/lib:$LD_LIBRARY_PATH
cd ${BASEPATH}/ || exit
mvn package

View File

@@ -5,7 +5,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.mindspore.lite.demo</groupId>
<artifactId>quick_start_server_inference_java</artifactId>
<artifactId>quick_start_parallel_java</artifactId>
<version>1.0</version>
<properties>
@@ -20,7 +20,7 @@
<artifactId>mindspore-lite-java</artifactId>
<version>1.0</version>
<scope>system</scope>
<systemPath>${project.basedir}/lib/mindspore-lite-java.jar</systemPath>
<systemPath>${project.basedir}/lib/runtime/lib/mindspore-lite-java.jar</systemPath>
</dependency>
</dependencies>

View File

@@ -66,19 +66,18 @@ pip install numpy wheel
# Download model and input data file
BASEPATH=$(cd "$(dirname $0)" || exit; pwd)
MODEL_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/quick_start/mobilenetv2.ms"
MODEL_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/quick_start/mobilenetv2.mindir"
INPUT_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/quick_start/input.bin"
mkdir -p model
if [ ! -e ${BASEPATH}/model/mobilenetv2.ms ]; then
wget -c -O ${BASEPATH}/model/mobilenetv2.ms --no-check-certificate ${MODEL_DOWNLOAD_URL}
if [ ! -e ${BASEPATH}/model/mobilenetv2.mindir ]; then
wget -c -O ${BASEPATH}/model/mobilenetv2.mindir --no-check-certificate ${MODEL_DOWNLOAD_URL}
fi
if [ ! -e ${BASEPATH}/model/input.bin ]; then
wget -c -O ${BASEPATH}/model/input.bin --no-check-certificate ${INPUT_DOWNLOAD_URL}
fi
# Reinstall MindSpore Lite whl package
arch=`uname -m`
mindspore_lite_whl=`ls ${BASEPATH}/mindspore_lite*.whl`
if [ -f "${mindspore_lite_whl}" ]; then
echo "==========[INFO]MindSpore Lite Whl found, install the current directory's package.=========="
@@ -87,7 +86,7 @@ if [ -f "${mindspore_lite_whl}" ]; then
else
echo "==========[INFO]MindSpore Lite Whl not found, install package from the network.=========="
python -m pip uninstall -y mindspore_lite
python -m pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MINDSPORE_LITE_VERSION}/MindSpore/lite/release/linux/${arch}/server/mindspore_lite-${MINDSPORE_LITE_VERSION/-/}-cp37-cp37m-linux_${arch}.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple
python -m pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${MINDSPORE_LITE_VERSION}/MindSpore/lite/release/centos_x86/cloud_fusion/mindspore_lite-${MINDSPORE_LITE_VERSION}-cp37-cp37m-linux_x86.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com -i https://pypi.tuna.tsinghua.edu.cn/simple
fi
# Check MindSpore Lite installation
python -c "import mindspore_lite"

View File

@@ -82,7 +82,7 @@
parallel_runner_config = mslite.RunnerConfig(context=context, workers_num=WORKERS_NUM)
# Build ModelParallelRunner from file
model_parallel_runner = mslite.ModelParallelRunner()
model_parallel_runner.init(model_path="./model/mobilenetv2.ms", runner_config=parallel_runner_config)
model_parallel_runner.init(model_path="./model/mobilenetv2.mindir", runner_config=parallel_runner_config)
# The server creates 5 threads to store the inference tasks of 5 clients.
threads = []
total_start_time = time.time()
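
For comparison, the same parallel-runner setup is also exposed through the C++ server inference API. The sketch below mirrors the Python snippet above (CPU context, two workers, the downloaded mobilenetv2.mindir); the header path and method names follow my understanding of the MindSpore Lite API rather than this PR, so treat them as assumptions.

// Sketch: initialize a ModelParallelRunner from a MindIR file in C++,
// mirroring the Python ModelParallelRunner snippet above.
#include <iostream>
#include <memory>
#include "include/api/context.h"
#include "include/api/model_parallel_runner.h"
#include "include/api/status.h"

int main() {
  constexpr int kWorkersNum = 2;  // analogous to WORKERS_NUM in the Python example

  auto context = std::make_shared<mindspore::Context>();
  context->MutableDeviceInfo().push_back(std::make_shared<mindspore::CPUDeviceInfo>());

  auto runner_config = std::make_shared<mindspore::RunnerConfig>();
  runner_config->SetContext(context);
  runner_config->SetWorkersNum(kWorkersNum);

  // Build the parallel runner straight from the .mindir file, as in the updated example.
  mindspore::ModelParallelRunner runner;
  if (runner.Init("./model/mobilenetv2.mindir", runner_config) != mindspore::kSuccess) {
    std::cerr << "ModelParallelRunner init failed." << std::endl;
    return -1;
  }

  // Each client thread would then prepare its own input MSTensors (e.g. via
  // MSTensor::CreateTensor) and call runner.Predict(inputs, &outputs) concurrently.
  std::cout << "parallel runner ready, input count: " << runner.GetInputs().size() << std::endl;
  return 0;
}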

View File

@@ -66,12 +66,12 @@ pip install numpy wheel
# Download model and input data file
BASEPATH=$(cd "$(dirname $0)" || exit; pwd)
MODEL_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/quick_start/mobilenetv2.ms"
MODEL_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/quick_start/mobilenetv2.mindir"
INPUT_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/quick_start/input.bin"
mkdir -p model
if [ ! -e ${BASEPATH}/model/mobilenetv2.ms ]; then
wget -c -O ${BASEPATH}/model/mobilenetv2.ms --no-check-certificate ${MODEL_DOWNLOAD_URL}
if [ ! -e ${BASEPATH}/model/mobilenetv2.mindir ]; then
wget -c -O ${BASEPATH}/model/mobilenetv2.mindir --no-check-certificate ${MODEL_DOWNLOAD_URL}
fi
if [ ! -e ${BASEPATH}/model/input.bin ]; then
wget -c -O ${BASEPATH}/model/input.bin --no-check-certificate ${INPUT_DOWNLOAD_URL}

View File

@@ -1,37 +0,0 @@
cmake_minimum_required(VERSION 3.14)
project(QuickStartCpp)
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.3.0)
message(FATAL_ERROR "GCC version ${CMAKE_CXX_COMPILER_VERSION} must not be less than 7.3.0")
endif()
# Add directory to include search path
include_directories(${CMAKE_CURRENT_SOURCE_DIR})
# Add directory to linker search path
link_directories(${CMAKE_CURRENT_SOURCE_DIR}/lib)
file(GLOB_RECURSE QUICK_START_CXX ${CMAKE_CURRENT_SOURCE_DIR}/*.cc)
add_executable(mindspore_quick_start_cpp ${QUICK_START_CXX})
target_link_libraries(
mindspore_quick_start_cpp
-Wl,--whole-archive mindspore-lite -Wl,--no-whole-archive
${CMAKE_CURRENT_SOURCE_DIR}/lib/libmindspore_glog.so.0
pthread
)
# Due to the increased compilation options for stack protection,
# it is necessary to target link ssp library when Use the static library in Windows.
if(WIN32)
target_link_libraries(
mindspore_quick_start_cpp
ssp
)
else()
target_link_libraries(
mindspore_quick_start_cpp
dl
)
endif()