mindspore lite: add graph_executor

This commit is contained in:
liu lili 2022-08-26 16:15:00 +08:00
parent 2cbecdfe5c
commit bfaea54e1e
10 changed files with 50 additions and 24 deletions

View File

@@ -11,6 +11,7 @@ else()
set(BUILD_DIR ${TOP_DIR}/mindspore/lite/build)
endif()
set(TEST_CASE_DIR ${TOP_DIR}/mindspore/lite/test/build)
set(EXTENDRT_BUILD_DIR ${TOP_DIR}/mindspore/lite/build/src/extendrt)
set(RUNTIME_DIR ${RUNTIME_PKG_NAME}/runtime)
set(RUNTIME_INC_DIR ${RUNTIME_PKG_NAME}/runtime/include)
@@ -23,6 +24,7 @@ set(SECUREC_DIR ${RUNTIME_PKG_NAME}/runtime/third_party/securec)
set(MINDSPORE_LITE_LIB_NAME libmindspore-lite)
set(MINDSPORE_LITE_EXTENDRT_LIB_NAME libmindspore-lite)
set(MINDSPORE_CORE_LIB_NAME libmindspore_core)
set(MINDSPORE_GE_LITERT_LIB_NAME libmsplugin-ge-litert)
set(BENCHMARK_NAME benchmark)
set(MSLITE_NNIE_LIB_NAME libmslite_nnie)
set(MSLITE_PROPOSAL_LIB_NAME libmslite_proposal)
@@ -407,6 +409,10 @@ if(PLATFORM_ARM64)
if(MSLITE_ENABLE_CLOUD_FUSION_INFERENCE)
install(FILES ${TOP_DIR}/mindspore/lite/build/src/extendrt/${MINDSPORE_LITE_EXTENDRT_LIB_NAME}.so
DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
install(FILES ${EXTENDRT_BUILD_DIR}/delegate/graph_executor/litert/${MINDSPORE_GE_LITERT_LIB_NAME}.so
DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
install(FILES ${BUILD_DIR}/tools/converter/libmindspore_converter.so
DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
install(FILES ${glog_LIBPATH}/libmindspore_glog.so.0.4.0 DESTINATION ${RUNTIME_LIB_DIR}
RENAME libmindspore_glog.so.0 COMPONENT ${RUNTIME_COMPONENT_NAME})
install(TARGETS mindspore_core DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
@@ -642,6 +648,10 @@ elseif(PLATFORM_ARM32)
if(MSLITE_ENABLE_CLOUD_FUSION_INFERENCE)
install(FILES ${TOP_DIR}/mindspore/lite/build/src/extendrt/${MINDSPORE_LITE_EXTENDRT_LIB_NAME}.so
DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
install(FILES ${EXTENDRT_BUILD_DIR}/delegate/graph_executor/litert/${MINDSPORE_GE_LITERT_LIB_NAME}.so
DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
install(FILES ${BUILD_DIR}/tools/converter/libmindspore_converter.so
DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
install(FILES ${glog_LIBPATH}/libmindspore_glog.so.0.4.0 DESTINATION ${RUNTIME_LIB_DIR}
RENAME libmindspore_glog.so.0 COMPONENT ${RUNTIME_COMPONENT_NAME})
install(TARGETS mindspore_core DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
@@ -828,6 +838,10 @@ else()
if(MSLITE_ENABLE_CLOUD_FUSION_INFERENCE)
install(FILES ${TOP_DIR}/mindspore/lite/build/src/extendrt/${MINDSPORE_LITE_EXTENDRT_LIB_NAME}.so
DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
install(FILES ${EXTENDRT_BUILD_DIR}/delegate/graph_executor/litert/${MINDSPORE_GE_LITERT_LIB_NAME}.so
DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
install(FILES ${BUILD_DIR}/tools/converter/libmindspore_converter.so
DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
install(FILES ${glog_LIBPATH}/libmindspore_glog.so.0.4.0 DESTINATION ${RUNTIME_LIB_DIR}
RENAME libmindspore_glog.so.0 COMPONENT ${RUNTIME_COMPONENT_NAME})
install(TARGETS mindspore_core DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})

View File

@@ -480,24 +480,26 @@ if(MSLITE_ENABLE_MINDRT)
target_link_libraries(mindspore-lite_static mindrt_mid)
endif()
if(SUPPORT_TENSORRT)
add_compile_definitions(GPU_TENSORRT)
set(TENSORRT_PATH $ENV{TENSORRT_PATH})
set(CUDA_PATH $ENV{CUDA_HOME})
set(TENSORRT_LIB_PATH ${TENSORRT_PATH}/lib)
set(CUDA_LIB_PATH ${CUDA_PATH}/lib64)
include_directories(${TENSORRT_PATH}/include)
include_directories(${CUDA_PATH}/include)
add_subdirectory(litert/delegate/tensorrt)
target_link_libraries(mindspore-lite tensorrt_kernel_mid cuda_kernel_mid gpu_distribution_collective)
target_link_libraries(mindspore-lite_static tensorrt_kernel_mid cuda_kernel_mid gpu_distribution_collective)
else()
set(TENSORRT_STUB
${CMAKE_CURRENT_SOURCE_DIR}/litert/delegate/tensorrt/distribution/distribution_base.cc
)
add_library(tensorrt_stub OBJECT ${TENSORRT_STUB})
target_link_libraries(mindspore-lite tensorrt_stub)
target_link_libraries(mindspore-lite_static tensorrt_stub)
if(NOT MSLITE_ENABLE_CLOUD_FUSION_INFERENCE)
if(SUPPORT_TENSORRT)
add_compile_definitions(GPU_TENSORRT)
set(TENSORRT_PATH $ENV{TENSORRT_PATH})
set(CUDA_PATH $ENV{CUDA_HOME})
set(TENSORRT_LIB_PATH ${TENSORRT_PATH}/lib)
set(CUDA_LIB_PATH ${CUDA_PATH}/lib64)
include_directories(${TENSORRT_PATH}/include)
include_directories(${CUDA_PATH}/include)
add_subdirectory(litert/delegate/tensorrt)
target_link_libraries(mindspore-lite tensorrt_kernel_mid cuda_kernel_mid gpu_distribution_collective)
target_link_libraries(mindspore-lite_static tensorrt_kernel_mid cuda_kernel_mid gpu_distribution_collective)
else()
set(TENSORRT_STUB
${CMAKE_CURRENT_SOURCE_DIR}/litert/delegate/tensorrt/distribution/distribution_base.cc
)
add_library(tensorrt_stub OBJECT ${TENSORRT_STUB})
target_link_libraries(mindspore-lite tensorrt_stub)
target_link_libraries(mindspore-lite_static tensorrt_stub)
endif()
endif()
if(MSLITE_GPU_BACKEND STREQUAL opencl)

View File

@@ -7,6 +7,7 @@ set(MSLITE_EXTEND_RUNTIME_SRC ${MSLITE_EXTEND_RUNTIME_SRC}
${MODEL_LOADER_FRAMEWORK_SRC})
if(MSLITE_ENABLE_CLOUD_FUSION_INFERENCE)
set(ENABLE_CPU on)
set(LOAD_PLUGIN_STATIC on)
add_compile_definitions(USE_GLOG)
string(REPLACE "-fno-rtti" "" CMAKE_C_FLAGS ${CMAKE_C_FLAGS})
string(REPLACE "-fno-rtti" "" CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
@@ -141,6 +142,7 @@ if(MSLITE_ENABLE_CLOUD_FUSION_INFERENCE)
add_dependencies(mindspore-extendrt mindspore-infer-anfalgo)
add_dependencies(mindspore-extendrt mindspore-kernel-graph)
add_subdirectory(cxx_api)
add_subdirectory(delegate/graph_executor/litert)
# when cpu kernel is need
#if(NOT MSLITE_ENABLE_ACL)
@@ -156,6 +158,8 @@ if(MSLITE_ENABLE_CLOUD_FUSION_INFERENCE)
mindspore-kernel-graph _mindspore_backend_common_optimizer_obj
_mindspore_backend_common_pass_obj)
target_link_libraries(mindspore-extendrt mindspore_core mindspore::protobuf mindspore::pybind11_module)
add_dependencies(mindspore-extendrt msplugin-ge-litert)
target_link_libraries(mindspore-extendrt msplugin-ge-litert)
if(NOT WIN32)
target_link_libraries(mindspore-extendrt dl)

View File

@@ -30,7 +30,7 @@ set(LITE_SRC
${LITE_DIR}/src/litert/infer_manager.cc
${LITE_DIR}/src/litert/runtime_shape_fusion_pass.cc
${LITE_DIR}/src/litert/runtime_pass.cc
${LITE_DIR}/src/litert/pass/runtime_ncx_pass.cc
# ${LITE_DIR}/src/litert/pass/runtime_ncx_pass.cc
${LITE_DIR}/src/litert/schema_tensor_wrapper.cc
${LITE_DIR}/src/tensor.cc
${LITE_DIR}/src/tensorlist.cc

View File

@@ -1,5 +1,5 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
* Copyright 2019-2022 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

View File

@@ -1,5 +1,5 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
* Copyright 2019-2022 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

View File

@@ -1,5 +1,5 @@
/**
* Copyright 2019-2021 Huawei Technologies Co., Ltd
* Copyright 2019-2022 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

View File

@@ -1,5 +1,5 @@
/**
* Copyright 2019-2021 Huawei Technologies Co., Ltd
* Copyright 2019-2022 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -36,7 +36,7 @@ class LiteRTGraphExecutor : public GraphExecutor {
bool CompileGraph(const FuncGraphPtr &graph, const std::map<string, string> &compile_options) override;
bool RunGraph(const FuncGraphPtr &graph, const std::vector<tensor::Tensor> &inputs,
std::vector<tensor::Tensor> *outputs, const std::map<string, string> &compile_options) override;
bool Resize(const std::vector<tensor::Tensor> &inputs, const std::vector<std::vector<int64_t>> &dims) override;
bool Resize(const std::vector<tensor::Tensor> &inputs, const std::vector<std::vector<int64_t>> &dims);
std::vector<tensor::TensorPtr> GetInputs();
std::shared_ptr<lite::LiteSession> CreateLiteSession(lite::InnerContext *context);
std::vector<MSTensor> GetLiteSessionInputs();

View File

@@ -225,6 +225,11 @@ int ConverterImpl::Convert(const std::shared_ptr<ConverterPara> &param, schema::
return RET_OK;
}
int ConverterImpl::Convert(const std::shared_ptr<ConverterPara> &param, schema::MetaGraphT **meta_graph,
FuncGraphPtr func_graph) {
return RET_OK;
}
schema::MetaGraphT *ConverterImpl::TransferFuncGraph(const std::shared_ptr<ConverterPara> &param,
FuncGraphPtr func_graph) {
MS_CHECK_TRUE_MSG(metagraph_transform_ != nullptr, nullptr, "metagraph_transform_ init failed");

View File

@@ -55,6 +55,7 @@ class ConverterImpl {
int Convert(const std::shared_ptr<ConverterPara> &param, schema::MetaGraphT **meta_graph);
int Convert(const std::shared_ptr<ConverterPara> &param, schema::MetaGraphT **meta_graph, const void *buf,
const size_t &size);
int Convert(const std::shared_ptr<ConverterPara> &param, schema::MetaGraphT **meta_graph, FuncGraphPtr func_graph);
private:
FuncGraphPtr BuildFuncGraph(const std::shared_ptr<ConverterPara> &param);