forked from mindspore-Ecosystem/mindspore
add quick start demo
This commit is contained in:
parent 4ed332f28f
commit 36e1dee219

@@ -24,6 +24,34 @@
 #include "include/context.h"
 #include "include/lite_session.h"
 
+std::string RealPath(const char *path) {
+  size_t PATH_MAX = 4096;
+  if (path == nullptr) {
+    std::cerr << "path is nullptr" << std::endl;
+    return "";
+  }
+  if ((strlen(path)) >= PATH_MAX) {
+    std::cerr << "path is too long" << std::endl;
+    return "";
+  }
+  auto resolved_path = std::make_unique<char[]>(PATH_MAX);
+  if (resolved_path == nullptr) {
+    std::cerr << "new resolved_path failed" << std::endl;
+    return "";
+  }
+#ifdef _WIN32
+  char *real_path = _fullpath(resolved_path.get(), path, 1024);
+#else
+  char *real_path = realpath(path, resolved_path.get());
+#endif
+  if (real_path == nullptr || strlen(real_path) == 0) {
+    std::cerr << "file path is not valid : " << path << std::endl;
+    return "";
+  }
+  std::string res = resolved_path.get();
+  return res;
+}
+
 char *ReadFile(const char *file, size_t *size) {
   if (file == nullptr) {
     std::cerr << "file is nullptr." << std::endl;
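
The added `RealPath` helper canonicalizes a user-supplied path before any file I/O, so subsequent checks and error messages operate on the resolved path. A minimal sketch of how a caller combines it with `ReadFile` (the wrapper function and its name are illustrative, reusing the demo's helpers declared above):

```cpp
#include <iostream>
#include <string>

// Hypothetical caller: canonicalize the path first, then read the model file.
int LoadModelBuffer(const char *raw_path) {
  std::string model_path = RealPath(raw_path);  // empty string signals failure
  if (model_path.empty()) {
    std::cerr << "model path " << raw_path << " is invalid." << std::endl;
    return -1;
  }
  size_t size = 0;
  char *model_buf = ReadFile(model_path.c_str(), &size);
  if (model_buf == nullptr) {
    std::cerr << "Read model file failed." << std::endl;
    return -1;
  }
  // ... hand model_buf/size to mindspore::lite::Model::Import() ...
  delete[] model_buf;  // assumption: ReadFile allocates the buffer with new[]
  return 0;
}
```
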
@@ -68,14 +96,11 @@ void GenerateRandomData(int size, void *data, Distribution distribution) {
 int GenerateInputDataWithRandom(std::vector<mindspore::tensor::MSTensor *> inputs) {
   for (auto tensor : inputs) {
     auto input_data = tensor->MutableData();
-    void *random_data = malloc(tensor->Size());
     if (input_data == nullptr) {
       std::cerr << "MallocData for inTensor failed." << std::endl;
       return -1;
     }
-    GenerateRandomData<float>(tensor->Size(), random_data, std::uniform_real_distribution<float>(0.1f, 1.0f));
-    // Copy data to input tensor.
-    memcpy(input_data, random_data, tensor->Size());
+    GenerateRandomData<float>(tensor->Size(), input_data, std::uniform_real_distribution<float>(0.1f, 1.0f));
   }
   return mindspore::lite::RET_OK;
 }
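
The change writes random values straight into the tensor's own buffer from `MutableData()`, dropping the `malloc`ed staging buffer that was never freed. For context, a plausible body for the `GenerateRandomData` template named in the hunk header (only the signature appears above; this implementation is an assumption):

```cpp
#include <algorithm>
#include <random>

template <typename T, typename Distribution>
void GenerateRandomData(int size, void *data, Distribution distribution) {
  std::mt19937 random_engine;
  // size is in bytes (tensor->Size()), so convert it to an element count.
  int elements_num = size / sizeof(T);
  (void)std::generate_n(static_cast<T *>(data), elements_num,
                        [&]() { return static_cast<T>(distribution(random_engine)); });
}
```
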
@@ -126,6 +151,7 @@ mindspore::session::LiteSession *Compile(mindspore::lite::Model *model) {
   // Compile graph.
   auto ret = session->CompileGraph(model);
   if (ret != mindspore::lite::RET_OK) {
+    delete session;
     std::cerr << "Compile failed while running." << std::endl;
     return nullptr;
   }
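
Adding `delete session;` on the `CompileGraph` failure path plugs a leak: `Compile` returns `nullptr` there, so the caller has no handle to free. One way to make such paths leak-proof by construction is a `std::unique_ptr` released only on success (a sketch under that design, not how the demo is written):

```cpp
#include <iostream>
#include <memory>
#include "include/lite_session.h"

// Sketch: RAII variant of Compile. Assumes `context` is a configured
// mindspore::lite::Context, created as elsewhere in the demo.
mindspore::session::LiteSession *CompileRaii(mindspore::lite::Model *model,
                                             const mindspore::lite::Context *context) {
  std::unique_ptr<mindspore::session::LiteSession> session(
      mindspore::session::LiteSession::CreateSession(context));
  if (session == nullptr) {
    std::cerr << "CreateSession failed while running." << std::endl;
    return nullptr;
  }
  if (session->CompileGraph(model) != mindspore::lite::RET_OK) {
    std::cerr << "Compile failed while running." << std::endl;
    return nullptr;  // unique_ptr frees the half-built session here
  }
  return session.release();  // caller takes ownership on success
}
```
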
@@ -143,9 +169,13 @@ int CompileAndRun(int argc, const char **argv) {
     return -1;
   }
   // Read model file.
-  auto model_path = argv[1];
+  auto model_path = RealPath(argv[1]);
+  if (model_path.empty()) {
+    std::cerr << "model path " << argv[1] << " is invalid.";
+    return -1;
+  }
   size_t size = 0;
-  char *model_buf = ReadFile(model_path, &size);
+  char *model_buf = ReadFile(model_path.c_str(), &size);
   if (model_buf == nullptr) {
     std::cerr << "Read model file failed." << std::endl;
     return -1;
@@ -160,12 +190,15 @@ int CompileAndRun(int argc, const char **argv) {
   // Compile MindSpore Lite model.
   auto session = Compile(model);
   if (session == nullptr) {
+    delete model;
     std::cerr << "Create session failed." << std::endl;
     return -1;
   }
   // Run inference.
   auto ret = Run(session);
   if (ret != mindspore::lite::RET_OK) {
+    delete model;
+    delete session;
     std::cerr << "MindSpore Lite run failed." << std::endl;
     return -1;
   }
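
This hunk pairs each early return in `CompileAndRun` with the deletes it needs. An alternative that makes every exit path leak-proof by construction is a small scope guard (illustrative only; the demo itself uses explicit deletes):

```cpp
#include <functional>
#include <utility>

// Minimal scope guard (illustrative, not part of the demo): runs a cleanup
// function on every exit path, so early returns cannot leak model/session.
class ScopeGuard {
 public:
  explicit ScopeGuard(std::function<void()> f) : f_(std::move(f)) {}
  ~ScopeGuard() { if (f_) f_(); }
  void Dismiss() { f_ = nullptr; }  // call if ownership is handed off

 private:
  std::function<void()> f_;
};

// Usage inside a function like CompileAndRun:
//   ScopeGuard guard([&] { delete model; delete session; });
//   ... every early return is now leak-free ...
```
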
@@ -24,7 +24,7 @@ target_link_libraries(
 SET(CMAKE_INSTALL_PREFIX ${CMAKE_CURRENT_SOURCE_DIR}/build/tmp)
 
 INSTALL(TARGETS runtime_cpp
-        DESTINATION exe)
+        DESTINATION bin)
 
 INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/lib/libhiai.so
         DESTINATION lib)

@@ -1,4 +1,4 @@
-## Build and Run
+# Build and Run
 
 - Environment requirements
   - System environment: Linux x86_64; Ubuntu 18.04.02 LTS is recommended
@@ -7,7 +7,7 @@
 - [GCC](https://gcc.gnu.org/releases.html) >= 7.3.0
 - [Android_NDK](https://dl.google.com/android/repository/android-ndk-r20b-linux-x86_64.zip) >= r20
 - [Git](https://git-scm.com/downloads) >= 2.28.0
 
 - Build
 
   Run the build script in the `mindspore/lite/examples/runtime_cpp` directory; it downloads the required files and builds the demo automatically.
@@ -48,8 +48,19 @@
 export LD_LIBRARY_PATH=/data/local/tmp/runtime_cpp_demo/lib:${LD_LIBRARY_PATH}
 ```
 
-After building, enter the `mindspore/lite/examples/runtime_cpp/build` directory and run the following command to try MindSpore Lite inference with the mobilenetv2 model.
+The demo takes two arguments: the first is the model path, and the second is an Option; different Options run different inference flows.
+
+| Option | Flow                         |
+| ------ | ---------------------------- |
+| 0      | Basic inference              |
+| 1      | Input-dimension resize       |
+| 2      | Simplified CreateSession API |
+| 3      | Parallel sessions            |
+| 4      | Shared memory pool           |
+| 5      | Inference with callbacks     |
+
+For example, run the following command to try MindSpore Lite inference with the MobileNetV2 model:
 
 ```bash
-./runtime_cpp ../model/mobilenetv2.ms 0
+cd ./runtime_cpp_demo/bin && ./runtime_cpp ../model/mobilenetv2.ms 0
 ```

@@ -26,6 +26,34 @@
 #include "include/lite_session.h"
 #include "include/version.h"
 
+std::string RealPath(const char *path) {
+  size_t PATH_MAX = 4096;
+  if (path == nullptr) {
+    std::cerr << "path is nullptr" << std::endl;
+    return "";
+  }
+  if ((strlen(path)) >= PATH_MAX) {
+    std::cerr << "path is too long" << std::endl;
+    return "";
+  }
+  auto resolved_path = std::make_unique<char[]>(PATH_MAX);
+  if (resolved_path == nullptr) {
+    std::cerr << "new resolved_path failed" << std::endl;
+    return "";
+  }
+#ifdef _WIN32
+  char *real_path = _fullpath(resolved_path.get(), path, 1024);
+#else
+  char *real_path = realpath(path, resolved_path.get());
+#endif
+  if (real_path == nullptr || strlen(real_path) == 0) {
+    std::cerr << "file path is not valid : " << path << std::endl;
+    return "";
+  }
+  std::string res = resolved_path.get();
+  return res;
+}
+
 char *ReadFile(const char *file, size_t *size) {
   if (file == nullptr) {
     std::cerr << "file is nullptr." << std::endl;
@@ -77,6 +105,8 @@ std::shared_ptr<mindspore::lite::Context> CreateCPUContext() {
     std::cerr << "New context failed while running." << std::endl;
     return nullptr;
   }
+  // Configure the number of worker threads in the thread pool to 2, including the main thread.
+  context->thread_num_ = 2;
   // CPU device context has default values.
   auto &cpu_device_info = context->device_list_[0].device_info_.cpu_device_info_;
   // The large core takes priority in thread and core binding methods. This parameter will work in the BindThread
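
For orientation, a sketch of how `CreateCPUContext` presumably assembles the whole context. The field names `thread_num_`, `device_list_`, and `cpu_device_info_` appear in the hunk above; the bind-mode constant `HIGHER_CPU` is an assumption from the MindSpore Lite 1.x headers:

```cpp
#include <iostream>
#include <memory>
#include "include/context.h"

std::shared_ptr<mindspore::lite::Context> CreateCPUContextSketch() {
  auto context = std::make_shared<mindspore::lite::Context>();
  if (context == nullptr) {
    std::cerr << "New context failed while running." << std::endl;
    return nullptr;
  }
  // Two worker threads in total, including the main thread.
  context->thread_num_ = 2;
  // device_list_[0] is the CPU device by default.
  auto &cpu_device_info = context->device_list_[0].device_info_.cpu_device_info_;
  // Assumption: HIGHER_CPU prefers big cores when BindThread(true) is called.
  cpu_device_info.cpu_bind_mode_ = mindspore::lite::HIGHER_CPU;
  return context;
}
```
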
@@ -317,6 +347,7 @@ int Run(const char *model_path) {
   auto ret = session->RunGraph();
   if (ret != mindspore::lite::RET_OK) {
+    delete model;
     delete session;
     std::cerr << "Inference error " << ret << std::endl;
     return ret;
   }
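
On success, `Run` presumably reads the output tensors before cleaning up. A sketch of fetching outputs through the session API (`GetOutputs`, `ElementsNum`, and `MutableData` are MindSpore Lite 1.x methods; the float32 output type is our assumption):

```cpp
#include <iostream>

// Sketch: read back outputs after session->RunGraph() succeeds.
void PrintOutputs(mindspore::session::LiteSession *session) {
  auto outputs = session->GetOutputs();  // map of tensor name -> MSTensor *
  for (auto &kv : outputs) {
    auto *tensor = kv.second;
    std::cout << "tensor name: " << kv.first
              << ", elements: " << tensor->ElementsNum() << std::endl;
    auto *data = reinterpret_cast<float *>(tensor->MutableData());
    // Print the first few values (assumes a float32 output).
    for (int i = 0; i < tensor->ElementsNum() && i < 10; ++i) {
      std::cout << data[i] << " ";
    }
    std::cout << std::endl;
  }
}
```
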
@@ -360,6 +391,7 @@ int RunResize(const char *model_path) {
   auto ret = ResizeInputsTensorShape(session);
   if (ret != mindspore::lite::RET_OK) {
+    delete model;
     delete session;
     std::cerr << "Resize input tensor shape error." << ret << std::endl;
     return ret;
   }
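
`ResizeInputsTensorShape` wraps `LiteSession::Resize`, which takes the input tensors plus their new dimensions. A sketch of what such a helper typically looks like (`Resize(inputs, dims)` is the MindSpore Lite 1.x API; the concrete NHWC shape below is illustrative only):

```cpp
#include <vector>

// Sketch: resize the first input to 1x256x256x3 before re-running inference.
int ResizeInputsTensorShapeSketch(mindspore::session::LiteSession *session) {
  auto inputs = session->GetInputs();
  std::vector<int> resize_shape = {1, 256, 256, 3};  // illustrative shape
  std::vector<std::vector<int>> new_shapes = {resize_shape};
  return session->Resize(inputs, new_shapes);
}
```
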
@@ -372,6 +404,7 @@ int RunResize(const char *model_path) {
   ret = session->RunGraph();
   if (ret != mindspore::lite::RET_OK) {
+    delete model;
     delete session;
     std::cerr << "Inference error " << ret << std::endl;
     return ret;
   }
@@ -411,6 +444,7 @@ int RunCreateSessionSimplified(const char *model_path) {
   session->BindThread(true);
   auto ret = session->RunGraph();
   if (ret != mindspore::lite::RET_OK) {
+    delete session;
     std::cerr << "Inference error " << ret << std::endl;
     return ret;
   }
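
The simplified flow builds the session straight from the serialized model buffer, so there is no standalone `Model` object to free; that is why only `delete session;` is added here. A sketch of the one-call variant (the `CreateSession(model_buf, size, context)` overload is assumed from the MindSpore Lite 1.x headers, and the helper name is ours):

```cpp
#include <memory>

// Sketch: create a compiled session directly from the model buffer.
mindspore::session::LiteSession *CreateSessionSimplifiedSketch(const char *model_path) {
  size_t size = 0;
  char *model_buf = ReadFile(model_path, &size);  // demo helper from above
  if (model_buf == nullptr) {
    return nullptr;
  }
  auto context = std::make_shared<mindspore::lite::Context>();
  auto *session =
      mindspore::session::LiteSession::CreateSession(model_buf, size, context.get());
  delete[] model_buf;  // assumption: the session keeps its own copy of the graph
  return session;      // caller deletes the session; no separate Model to free
}
```
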
@@ -462,10 +496,6 @@ int RunSessionParallel(const char *model_path) {
     GetInputsByTensorNameAndSetData(session1);
     auto status = session1->RunGraph();
     if (status != 0) {
-      if (model != nullptr) {
-        delete model;
-        model = nullptr;
-      }
      std::cerr << "Inference error " << status << std::endl;
      return;
    }
@@ -476,10 +506,6 @@ int RunSessionParallel(const char *model_path) {
     GetInputsByTensorNameAndSetData(session2);
     auto status = session2->RunGraph();
     if (status != 0) {
-      if (model != nullptr) {
-        delete model;
-        model = nullptr;
-      }
      std::cerr << "Inference error " << status << std::endl;
      return;
    }
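
These failure branches run inside thread lambdas (hence the bare `return;`), and deleting the shared `model` from one thread while the other might still use it was a race; the removals leave the model to be freed once, after both threads finish. A sketch of the surrounding structure (the wrapper function is illustrative; `GetInputsByTensorNameAndSetData` is the demo's helper):

```cpp
#include <iostream>
#include <thread>

// Sketch: two sessions compiled from one Model, each run on its own thread.
// The shared Model must outlive both threads and be deleted exactly once.
void RunTwoSessionsSketch(mindspore::session::LiteSession *session1,
                          mindspore::session::LiteSession *session2,
                          mindspore::lite::Model *model) {
  std::thread thread1([&]() {
    GetInputsByTensorNameAndSetData(session1);
    if (session1->RunGraph() != 0) {
      std::cerr << "session1 inference error" << std::endl;
      return;  // void lambda: a thread has no status to return
    }
  });
  std::thread thread2([&]() {
    GetInputsByTensorNameAndSetData(session2);
    if (session2->RunGraph() != 0) {
      std::cerr << "session2 inference error" << std::endl;
      return;
    }
  });
  thread1.join();
  thread2.join();
  delete model;  // safe now: no thread can touch it anymore
  delete session1;
  delete session2;
}
```
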
@@ -536,6 +562,7 @@ int RunWithSharedMemoryPool(const char *model_path) {
   auto ret = session1->CompileGraph(model);
   if (ret != mindspore::lite::RET_OK) {
+    delete model;
     delete session1;
     std::cerr << "Compile failed while running." << std::endl;
     return -1;
   }
@@ -552,6 +579,7 @@ int RunWithSharedMemoryPool(const char *model_path) {
   auto session2 = mindspore::session::LiteSession::CreateSession(context2.get());
   if (session2 == nullptr) {
+    delete model;
     delete session1;
     std::cerr << "CreateSession failed while running " << std::endl;
     return -1;
   }
@@ -559,6 +587,8 @@ int RunWithSharedMemoryPool(const char *model_path) {
   ret = session2->CompileGraph(model);
   if (ret != mindspore::lite::RET_OK) {
+    delete model;
+    delete session1;
     delete session2;
     std::cerr << "Compile failed while running " << std::endl;
     return -1;
   }
|
@ -580,6 +610,8 @@ int RunWithSharedMemoryPool(const char *model_path) {
|
|||
ret = session2->RunGraph();
|
||||
if (ret != mindspore::lite::RET_OK) {
|
||||
delete model;
|
||||
delete session1;
|
||||
delete session2;
|
||||
std::cerr << "Inference error " << ret << std::endl;
|
||||
return ret;
|
||||
}
|
||||
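
In the shared-memory-pool flow, both sessions are compiled from the same `Model` and their contexts share one allocator, so every failure path must now free the model and both sessions, as the hunks above do. A sketch of how the pool is shared (the `allocator` member on `Context` is an assumption from the MindSpore Lite 1.x headers):

```cpp
#include <memory>
#include "include/context.h"
#include "include/lite_session.h"

// Sketch: session2 reuses session1's memory pool by sharing the allocator.
auto context1 = std::make_shared<mindspore::lite::Context>();
auto *session1 = mindspore::session::LiteSession::CreateSession(context1.get());

auto context2 = std::make_shared<mindspore::lite::Context>();
context2->allocator = context1->allocator;  // assumption: one pool for both
auto *session2 = mindspore::session::LiteSession::CreateSession(context2.get());
```
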
|
@ -646,6 +678,7 @@ int RunCallback(const char *model_path) {
|
|||
auto ret = session->RunGraph(before_call_back, after_call_back);
|
||||
if (ret != mindspore::lite::RET_OK) {
|
||||
delete model;
|
||||
delete session;
|
||||
std::cerr << "Inference error " << ret << std::endl;
|
||||
return ret;
|
||||
}
|
||||
|
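
This `RunGraph` overload takes before/after callbacks that fire around every kernel. A sketch of how such callbacks are declared (the `KernelCallBack` signature with `CallBackParam` matches the MindSpore Lite 1.x headers; the logging shown is illustrative):

```cpp
#include <iostream>
#include <vector>
#include "include/lite_session.h"

// Sketch: per-node callbacks, e.g. for timing or tracing node execution.
mindspore::KernelCallBack before_call_back =
    [](const std::vector<mindspore::tensor::MSTensor *> &inputs,
       const std::vector<mindspore::tensor::MSTensor *> &outputs,
       const mindspore::CallBackParam &param) {
      std::cout << "before node: " << param.node_name << std::endl;
      return true;  // returning false aborts the run
    };
mindspore::KernelCallBack after_call_back =
    [](const std::vector<mindspore::tensor::MSTensor *> &inputs,
       const std::vector<mindspore::tensor::MSTensor *> &outputs,
       const mindspore::CallBackParam &param) {
      std::cout << "after node: " << param.node_name << " ("
                << param.node_type << ")" << std::endl;
      return true;
    };
// auto ret = session->RunGraph(before_call_back, after_call_back);
```
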
@@ -665,33 +698,37 @@ int RunCallback(const char *model_path) {
 
 int main(int argc, const char **argv) {
   if (argc < 3) {
-    std::cerr << "Usage: ./runtime_cpp model_path flag" << std::endl;
+    std::cerr << "Usage: ./runtime_cpp model_path Option" << std::endl;
     std::cerr << "Example: ./runtime_cpp ../model/mobilenetv2.ms 0" << std::endl;
-    std::cerr << "When your Flag is 0, you will run MindSpore Lite inference." << std::endl;
-    std::cerr << "When your Flag is 1, you will run MindSpore Lite inference with resize." << std::endl;
-    std::cerr << "When your Flag is 2, you will run MindSpore Lite inference with CreateSession simplified API."
+    std::cerr << "When your Option is 0, you will run MindSpore Lite inference." << std::endl;
+    std::cerr << "When your Option is 1, you will run MindSpore Lite inference with resize." << std::endl;
+    std::cerr << "When your Option is 2, you will run MindSpore Lite inference with CreateSession simplified API."
               << std::endl;
-    std::cerr << "When your Flag is 3, you will run MindSpore Lite inference with session parallel." << std::endl;
-    std::cerr << "When your Flag is 4, you will run MindSpore Lite inference with shared memory pool." << std::endl;
-    std::cerr << "When your Flag is 5, you will run MindSpore Lite inference with callback." << std::endl;
+    std::cerr << "When your Option is 3, you will run MindSpore Lite inference with session parallel." << std::endl;
+    std::cerr << "When your Option is 4, you will run MindSpore Lite inference with shared memory pool." << std::endl;
+    std::cerr << "When your Option is 5, you will run MindSpore Lite inference with callback." << std::endl;
     return -1;
   }
   std::string version = mindspore::lite::Version();
   std::cout << "MindSpore Lite Version is " << version << std::endl;
-  auto model_path = argv[1];
+  auto model_path = RealPath(argv[1]);
+  if (model_path.empty()) {
+    std::cerr << "model path " << argv[1] << " is invalid.";
+    return -1;
+  }
   auto flag = argv[2];
   if (strcmp(flag, "0") == 0) {
-    return Run(model_path);
+    return Run(model_path.c_str());
   } else if (strcmp(flag, "1") == 0) {
-    return RunResize(model_path);
+    return RunResize(model_path.c_str());
   } else if (strcmp(flag, "2") == 0) {
-    return RunCreateSessionSimplified(model_path);
+    return RunCreateSessionSimplified(model_path.c_str());
   } else if (strcmp(flag, "3") == 0) {
-    return RunSessionParallel(model_path);
+    return RunSessionParallel(model_path.c_str());
   } else if (strcmp(flag, "4") == 0) {
-    return RunWithSharedMemoryPool(model_path);
+    return RunWithSharedMemoryPool(model_path.c_str());
   } else if (strcmp(flag, "5") == 0) {
-    return RunCallback(model_path);
+    return RunCallback(model_path.c_str());
   } else {
     std::cerr << "Unsupported Flag " << flag << std::endl;
     return -1;