remove pybind11 calls from the cxx library

Signed-off-by: zhoufeng <zhoufeng54@huawei.com>
zhoufeng 2020-12-08 23:37:12 +08:00
parent e1e8f1d429
commit cd1ce73a25
17 changed files with 584 additions and 437 deletions

View File

@@ -37,6 +37,7 @@ set(INSTALL_LIB_DIR ${CMAKE_INSTALL_LIBDIR} CACHE PATH "Installation directory f
set(INSTALL_PY_DIR ".")
set(INSTALL_BASE_DIR ".")
set(INSTALL_BIN_DIR "bin")
set(INSTALL_CFG_DIR "config")
if (CMAKE_SYSTEM_NAME MATCHES "Windows")
set(INSTALL_LIB_DIR ".")
@@ -308,6 +309,13 @@ install(
COMPONENT mindspore
)
## config files
install(
FILES ${CMAKE_SOURCE_DIR}/config/op_info.config
DESTINATION ${INSTALL_CFG_DIR}
COMPONENT mindspore
)
if (ENABLE_SERVING)
install(
TARGETS ms_serving

File diff suppressed because one or more lines are too long

View File

@@ -106,17 +106,15 @@ extern MS_API const char *kDeviceTypeAscend310;
extern MS_API const char *kDeviceTypeAscend910;
constexpr auto kModelOptionDumpCfgPath = "mindspore.option.dump_config_file_path";
constexpr auto kModelOptionDvppCfgPath = "mindspore.option.dvpp_config_file_path";
constexpr auto kModelOptionInsertOpCfgPath = "mindspore.option.insert_op_config_file_path"; // aipp config file
constexpr auto kModelOptionInputFormat = "mindspore.option.input_format"; // nchw or nhwc
// Mandatory while dynamic batch: e.g. "input_op_name1: n1,c2,h3,w4;input_op_name2: n4,c3,h2,w1"
constexpr auto kModelOptionInputShape = "mindspore.option.input_shape";
constexpr auto kModelOptionDynamicBatchSize = "mindspore.option.dynamic_batch_size";
constexpr auto kModelOptionDynamicImageSize = "mindspore.option.dynamic_image_size";
constexpr auto kModelOptionDynamicDims = "mindspore.option.dynamic_dims";
constexpr auto kModelOptionSerialInput = "mindspore.option.serial_inputs_name"; // separated by ';'
constexpr auto kModelOptionOutputNode = "mindspore.option.output_node"; // e.g. "node_name1:0;node_name2:1"
constexpr auto kModelOptionOutputType = "mindspore.option.output_type"; // "FP32", "UINT8" or "FP16", default as "FP32"
constexpr auto kModelOptionPrecisionMode = "mindspore.option.precision_mode";
// "force_fp16", "allow_fp32_to_fp16", "must_keep_origin_dtype" or "allow_mix_precision", default as "force_fp16"
constexpr auto kModelOptionOpSelectImplMode = "mindspore.option.op_select_impl_mode";
// "high_precision" or "high_performance", default as "high_performance"
} // namespace api
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_TYPES_H
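
For reference, a minimal usage sketch (not part of this commit) of how these option keys could be fed to the cxx_api model build call, which accepts a string-to-string map; every value below is an illustrative placeholder.

// Hypothetical options map built from the keys defined above; values are placeholders.
#include <map>
#include <string>

std::map<std::string, std::string> MakeBuildOptions() {
  return {
      {"mindspore.option.insert_op_config_file_path", "/path/to/aipp.cfg"},  // aipp config file
      {"mindspore.option.input_format", "NCHW"},
      {"mindspore.option.input_shape", "input_op_name1:1,3,224,224"},
      {"mindspore.option.output_type", "FP32"},                              // "FP32", "UINT8" or "FP16"
      {"mindspore.option.precision_mode", "allow_fp32_to_fp16"},
      {"mindspore.option.op_select_impl_mode", "high_performance"},
  };
}

A map like this is what AclModel::Build(options_map) consumes further down in this commit.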

View File

@@ -10,6 +10,7 @@ if (ENABLE_ACL)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/src/ge)
include_directories(${CMAKE_BINARY_DIR}/proto/ge)
file(GLOB_RECURSE API_ACL_SRC ${CMAKE_CURRENT_SOURCE_DIR}
"python_utils.cc"
"model/acl/*.cc"
"model/model_converter_utils/*.cc"
"graph/acl/*.cc"
@@ -17,14 +18,13 @@ if (ENABLE_ACL)
endif ()
if (ENABLE_D)
file(GLOB_RECURSE API_MS_INFER_SRC ${CMAKE_CURRENT_SOURCE_DIR} "model/ms/*.cc" "graph/ms/*.cc")
file(GLOB_RECURSE API_MS_INFER_SRC ${CMAKE_CURRENT_SOURCE_DIR} "python_utils.cc" "model/ms/*.cc" "graph/ms/*.cc")
endif ()
set(MSLIB_SRC ${CMAKE_CURRENT_SOURCE_DIR}/types.cc
${CMAKE_CURRENT_SOURCE_DIR}/context.cc
${CMAKE_CURRENT_SOURCE_DIR}/cell.cc
${CMAKE_CURRENT_SOURCE_DIR}/serialization.cc
${CMAKE_CURRENT_SOURCE_DIR}/python_utils.cc
${CMAKE_CURRENT_SOURCE_DIR}/graph/graph.cc
${CMAKE_CURRENT_SOURCE_DIR}/graph/graph_data.cc
${CMAKE_CURRENT_SOURCE_DIR}/model/model.cc

View File

@@ -219,7 +219,6 @@ Status AclGraphImpl::Load() {
Status AclGraphImpl::ConvertToOM() {
MS_LOG(INFO) << "Start convert to om model.";
RegAllOpFromPython();
if (graph_ == nullptr) {
MS_LOG(ERROR) << "Invalid graph_ is null.";
return FAILED;

View File

@@ -15,6 +15,7 @@
*/
#include "cxx_api/graph/acl/model_process.h"
#include <sys/time.h>
#include <algorithm>
#include <map>
#include "utils/utils.h"
@@ -345,7 +346,18 @@ Status ModelProcess::PredictFromHost(const std::vector<Buffer> &inputs, std::vec
DestroyInputsDataset();
return ret; // forward status error
}
struct timeval start_time;
struct timeval end_time;
(void)gettimeofday(&start_time, nullptr);
acl_ret = aclmdlExecute(model_id_, inputs_, outputs_);
(void)gettimeofday(&end_time, nullptr);
constexpr uint64_t kUSecondInSecond = 1000000;
uint64_t cost =
(kUSecondInSecond * static_cast<uint64_t>(end_time.tv_sec) + static_cast<uint64_t>(end_time.tv_usec)) -
(kUSecondInSecond * static_cast<uint64_t>(start_time.tv_sec) + static_cast<uint64_t>(start_time.tv_usec));
MS_LOG(INFO) << "Model execute in " << cost << " us";
DestroyInputsDataset();
if (acl_ret != ACL_ERROR_NONE) {
MS_LOG(ERROR) << "Execute Model Failed";

View File

@@ -72,12 +72,16 @@ MsGraphImpl::MsGraphImpl()
outputs_(),
input_names_(),
output_names_(),
init_flag_(false),
load_flag_(false) {}
MsGraphImpl::~MsGraphImpl() { (void)FinalizeEnv(); }
Status MsGraphImpl::InitEnv() {
RegAllOpFromPython();
if (init_flag_) {
return SUCCESS;
}
RegAllOp();
auto ms_context = MsContext::GetInstance();
if (ms_context == nullptr) {
MS_LOG(ERROR) << "Get Context failed!";
@@ -100,12 +104,16 @@ Status MsGraphImpl::InitEnv() {
}
session_impl_->Init(device_id_);
init_flag_ = true;
return SUCCESS;
}
Status MsGraphImpl::FinalizeEnv() {
if (!init_flag_) {
return SUCCESS;
}
MS_LOG_INFO << "Start finalize env";
pybind11::gil_scoped_acquire acquire;
session::ExecutorManager::Instance().Clear();
device::KernelRuntimeManager::Instance().ClearRuntimeResource();
auto ms_context = MsContext::GetInstance();
@@ -117,6 +125,8 @@ Status MsGraphImpl::FinalizeEnv() {
MS_LOG(ERROR) << "CloseTsd failed!";
return FAILED;
}
init_flag_ = false;
MS_LOG(INFO) << "End finalize env";
return SUCCESS;
}
@@ -125,7 +135,6 @@ Status MsGraphImpl::CompileGraph(const std::shared_ptr<FuncGraph> &funcGraphPtr)
MS_ASSERT(session_impl_ != nullptr);
try {
graph_id_ = session_impl_->CompileGraph(NOT_NULL(funcGraphPtr));
pybind11::gil_scoped_release gil_release;
return SUCCESS;
} catch (std::exception &e) {
MS_LOG(ERROR) << "CompileGraph failed: " << e.what();

View File

@@ -59,6 +59,7 @@ class MsGraphImpl : public GraphCell::GraphImpl {
std::vector<tensor::TensorPtr> outputs_;
std::vector<std::string> input_names_;
std::vector<std::string> output_names_;
bool init_flag_;
bool load_flag_;
};
} // namespace mindspore::api

View File

@@ -25,7 +25,6 @@ API_FACTORY_REG(ModelImpl, Ascend310, AclModel);
Status AclModel::Build(const std::map<std::string, std::string> &options_map) {
MS_LOG(INFO) << "Start build model.";
MS_EXCEPTION_IF_NULL(graph_);
RegAllOpFromPython();
std::unique_ptr<AclModelOptions> options = std::make_unique<AclModelOptions>(options_map);
std::string options_str = GenerateOptionsStr(options_map);
MS_EXCEPTION_IF_NULL(options);

View File

@@ -15,6 +15,7 @@
*/
#include "cxx_api/model/acl/acl_model_options.h"
#include <memory>
#include "utils/log_adapter.h"
#include "external/ge/ge_api_types.h"
namespace mindspore::api {
@@ -27,40 +28,45 @@ static std::string ParseOption(const std::map<std::string, std::string> &options
}
AclModelOptions::AclModelOptions(const std::map<std::string, std::string> &options) {
dump_cfg_path = ParseOption(options, kModelOptionDumpCfgPath);
dvpp_cfg_path = ParseOption(options, kModelOptionDvppCfgPath);
output_node = ParseOption(options, kModelOptionOutputNode);
// to acl
insert_op_cfg_path = ParseOption(options, kModelOptionInsertOpCfgPath);
input_format = ParseOption(options, kModelOptionInputFormat);
input_shape = ParseOption(options, kModelOptionInputShape);
dynamic_batch_size = ParseOption(options, kModelOptionInputShape);
dynamic_image_size = ParseOption(options, kModelOptionInputShape);
dynamic_dims = ParseOption(options, kModelOptionInputShape);
serial_nodes_name = ParseOption(options, kModelOptionSerialInput);
output_type = ParseOption(options, kModelOptionOutputType);
precision_mode = ParseOption(options, kModelOptionPrecisionMode);
op_select_impl_mode = ParseOption(options, kModelOptionOpSelectImplMode);
}
std::map<std::string, std::string> AclModelOptions::GenAclOptions() const {
const std::map<std::string const *, std::string> acl_options_map = {
{&insert_op_cfg_path, ge::ir_option::INSERT_OP_FILE},
{&input_format, ge::ir_option::INPUT_FORMAT},
{&input_shape, ge::ir_option::INPUT_SHAPE},
{&dynamic_batch_size, ge::ir_option::DYNAMIC_BATCH_SIZE},
{&dynamic_image_size, ge::ir_option::DYNAMIC_IMAGE_SIZE},
{&dynamic_dims, ge::ir_option::DYNAMIC_DIMS},
{&serial_nodes_name, ge::ir_option::INPUT_FP16_NODES},
{&output_type, ge::ir_option::OUTPUT_TYPE},
std::tuple<std::map<std::string, std::string>, std::map<std::string, std::string>> AclModelOptions::GenAclOptions()
const {
const std::map<std::string const *, std::string> init_options_map = {
{&op_select_impl_mode, ge::ir_option::OP_SELECT_IMPL_MODE},
{&soc_version, ge::ir_option::SOC_VERSION},
};
std::map<std::string, std::string> acl_options;
for (auto [ms_option, acl_option_key] : acl_options_map) {
const std::map<std::string const *, std::string> build_options_map = {
{&insert_op_cfg_path, ge::ir_option::INSERT_OP_FILE}, {&input_format, ge::ir_option::INPUT_FORMAT},
{&input_shape, ge::ir_option::INPUT_SHAPE}, {&output_type, ge::ir_option::OUTPUT_TYPE},
{&precision_mode, ge::ir_option::PRECISION_MODE},
};
std::map<std::string, std::string> init_options;
std::map<std::string, std::string> build_options;
for (auto [ms_option, acl_option_key] : init_options_map) {
if (ms_option == nullptr || ms_option->empty()) {
continue;
}
acl_options.emplace(acl_option_key, *ms_option);
MS_LOG(INFO) << "Option " << acl_option_key << " : " << *ms_option;
init_options.emplace(acl_option_key, *ms_option);
}
return acl_options;
}
for (auto [ms_option, acl_option_key] : build_options_map) {
if (ms_option == nullptr || ms_option->empty()) {
continue;
}
MS_LOG(INFO) << "Option " << acl_option_key << " : " << *ms_option;
build_options.emplace(acl_option_key, *ms_option);
}
return {init_options, build_options};
}
} // namespace mindspore::api

View File

@@ -19,28 +19,27 @@
#include <vector>
#include <string>
#include <map>
#include <tuple>
#include "include/api/types.h"
#include "include/api/status.h"
namespace mindspore::api {
struct AclModelOptions {
std::string dump_cfg_path;
std::string dvpp_cfg_path;
std::string output_node; // todo: at convert.cc::BuildGraph(), no atc options
// build options
std::string insert_op_cfg_path;
std::string input_format;
std::string input_shape;
std::string dynamic_batch_size;
std::string dynamic_image_size;
std::string dynamic_dims;
std::string serial_nodes_name;
std::string output_type;
std::string precision_mode;
std::string op_select_impl_mode;
std::string soc_version = "Ascend310";
explicit AclModelOptions(const std::map<std::string, std::string> &options);
~AclModelOptions() = default;
std::map<std::string, std::string> GenAclOptions() const;
// return tuple<init_options, build_options>
std::tuple<std::map<std::string, std::string>, std::map<std::string, std::string>> GenAclOptions() const;
};
} // namespace mindspore::api
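
A short caller-side sketch (assumed usage, not part of this commit) of unpacking the two maps returned by GenAclOptions() with C++17 structured bindings; ModelConverter in this commit uses std::tie for the same purpose.

// Hypothetical caller; assumes this header is available on the include path.
#include <map>
#include <string>
#include "cxx_api/model/acl/acl_model_options.h"

void SplitAclOptions(const std::map<std::string, std::string> &user_options) {
  mindspore::api::AclModelOptions options(user_options);
  // Unpack tuple<init_options, build_options>.
  auto [init_options, build_options] = options.GenAclOptions();
  // init_options go to ge::aclgrphBuildInitialize(); build_options go to ge::aclgrphBuildModel().
  (void)init_options;
  (void)build_options;
}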

View File

@@ -131,17 +131,16 @@ transform::DfGraphPtr ModelConverter::ConvertFuncGraphToAIR(const FuncGraphPtr &
}
Buffer ModelConverter::BuildAirModel(const transform::DfGraphPtr &graph,
const std::map<std::string, std::string> &acl_options) {
const std::map<std::string, std::string> &init_options,
const std::map<std::string, std::string> &build_options) {
ge::ModelBufferData model;
auto ge_options = acl_options;
ge_options.emplace(ge::ir_option::SOC_VERSION, "Ascend310");
auto ret = ge::aclgrphBuildInitialize(ge_options);
auto ret = ge::aclgrphBuildInitialize(init_options);
if (ret != ge::SUCCESS) {
MS_LOG(ERROR) << "Call aclgrphBuildInitialize fail.";
return Buffer();
}
ret = ge::aclgrphBuildModel(*graph, acl_options, model);
ret = ge::aclgrphBuildModel(*graph, build_options, model);
if (ret != ge::SUCCESS) {
MS_LOG(ERROR) << "Call aclgrphBuildModel fail.";
return Buffer();
@@ -290,7 +289,6 @@ Buffer ModelConverter::LoadAscendIR(const Buffer &model_data) {
}
Buffer ModelConverter::LoadMindIRInner(const FuncGraphPtr &func_graph) {
RegAllOpFromPython();
if (func_graph == nullptr) {
MS_LOG(ERROR) << "Convert MindIR to FuncGraph failed.";
return Buffer();
@@ -302,17 +300,17 @@ Buffer ModelConverter::LoadMindIRInner(const FuncGraphPtr &func_graph) {
return Buffer();
}
std::map<std::string, std::string> acl_options;
std::map<std::string, std::string> init_options;
std::map<std::string, std::string> build_options;
if (options_ != nullptr) {
acl_options = options_->GenAclOptions();
std::tie(init_options, build_options) = options_->GenAclOptions();
}
auto om_data = BuildAirModel(df_graph, acl_options);
auto om_data = BuildAirModel(df_graph, init_options, build_options);
return om_data;
}
Buffer ModelConverter::LoadAscendIRInner(const Buffer &model_data) {
RegAllOpFromPython();
ge::Model load_model = ge::Model("loadmodel", "version2");
ge::Status ret =
ge::Model::Load(reinterpret_cast<const uint8_t *>(model_data.Data()), model_data.DataSize(), load_model);
@@ -327,12 +325,13 @@ Buffer ModelConverter::LoadAscendIRInner(const Buffer &model_data) {
return Buffer();
}
std::map<std::string, std::string> acl_options;
std::map<std::string, std::string> init_options;
std::map<std::string, std::string> build_options;
if (options_ != nullptr) {
acl_options = options_->GenAclOptions();
std::tie(init_options, build_options) = options_->GenAclOptions();
}
auto om_data = BuildAirModel(df_graph, acl_options);
auto om_data = BuildAirModel(df_graph, init_options, build_options);
return om_data;
}
} // namespace mindspore::api

View File

@@ -39,7 +39,8 @@ class ModelConverter {
private:
transform::DfGraphPtr ConvertFuncGraphToAIR(const FuncGraphPtr &anf_graph);
Buffer BuildAirModel(const transform::DfGraphPtr &graph, const std::map<std::string, std::string> &acl_options);
Buffer BuildAirModel(const transform::DfGraphPtr &graph, const std::map<std::string, std::string> &init_options,
const std::map<std::string, std::string> &build_options);
AclModelOptions *options_;
Buffer LoadMindIRInner(const FuncGraphPtr &func_graph);

View File

@@ -14,25 +14,23 @@
* limitations under the License.
*/
#include "cxx_api/python_utils.h"
#include <dlfcn.h>
#include <mutex>
#include <vector>
#include <memory>
#include <string>
#include <fstream>
#include "mindspore/core/utils/ms_context.h"
#include "pybind11/pybind11.h"
#include "backend/kernel_compiler/oplib/oplib.h"
namespace py = pybind11;
namespace mindspore::api {
void RegAllOpFromPython() {
static std::mutex init_mutex;
static bool Initialized = false;
static std::mutex init_mutex;
static bool Initialized = false;
std::lock_guard<std::mutex> lock(init_mutex);
if (Initialized) {
return;
}
Initialized = true;
namespace mindspore::api {
static void RegAllOpFromPython() {
MsContext::GetInstance()->set_param<int>(MS_CTX_EXECUTION_MODE, kGraphMode);
Py_Initialize();
auto c_expression = PyImport_ImportModule("mindspore._c_expression");
@@ -61,5 +59,64 @@ void RegAllOpFromPython() {
Py_DECREF(c_expression);
}
static bool RegAllOpFromFile() {
Dl_info info;
int dl_ret = dladdr(reinterpret_cast<void *>(RegAllOpFromFile), &info);
if (dl_ret == 0) {
MS_LOG(INFO) << "Get dladdr failed, skip.";
return false;
}
std::string dir(info.dli_fname);
MS_LOG(INFO) << "Get library path is " << dir;
auto split_pos = dir.find_last_of('/');
if (dir.empty() || split_pos == std::string::npos) {
MS_LOG(INFO) << "Missing op config file, skip.";
return false;
}
dir = dir.substr(0, split_pos) + "/../config/op_info.config";
if (dir.size() >= PATH_MAX) {
MS_LOG(ERROR) << "Op info path is invalid: " << dir;
return false;
}
char real_path_mem[PATH_MAX] = {0};
if (realpath(common::SafeCStr(dir), real_path_mem) == nullptr) {
MS_LOG(ERROR) << "Op info path is invalid: " << dir;
return false;
}
std::string real_path(real_path_mem);
MS_LOG(INFO) << "Start to read op info from local file " << real_path;
std::ifstream file(real_path);
if (!file.is_open()) {
MS_LOG(ERROR) << "Find op info file failed.";
return false;
}
std::string line;
while (getline(file, line)) {
if (!line.empty()) {
(void)kernel::OpLib::RegOp(line, "");
}
}
MS_LOG(INFO) << "End";
return true;
}
void RegAllOp() {
std::lock_guard<std::mutex> lock(init_mutex);
if (Initialized) {
return;
}
bool ret = RegAllOpFromFile();
if (!ret) {
MS_LOG(INFO) << "Reg all op from file failed, start to reg from python.";
RegAllOpFromPython();
}
Initialized = true;
}
bool PythonIsInited() { return Py_IsInitialized() != 0; }
} // namespace mindspore::api

View File

@@ -17,10 +17,8 @@
#ifndef MINDSPORE_CCSRC_CXXAPI_PYTHON_UTILS_H
#define MINDSPORE_CCSRC_CXXAPI_PYTHON_UTILS_H
#include "pybind11/pybind11.h"
namespace mindspore::api {
void RegAllOpFromPython();
void RegAllOp();
bool PythonIsInited();
} // namespace mindspore::api

View File

@@ -133,6 +133,7 @@ package_data = {
'lib/*.a',
'.commit_id',
'ms_serving',
'config/*',
'include/*',
'include/*/*',
'include/*/*/*',