forked from mindspore-Ecosystem/mindspore
!23391 Fix 310 mindir infer error: undefined symbol
Merge pull request !23391 from luoyang/fix_mindir
commit a59b3a40a9
@@ -75,16 +75,29 @@ class MS_API Model {
   Status Predict(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs,
                  const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr);
 
-  /// \brief Inference model, only for cv model inference.
+  /// \brief Inference model with preprocess in model.
   ///
-  /// \param[in] inputs A string represents the file path of input image.
+  /// \param[in] inputs A vector where model inputs are arranged in sequence.
+  /// \param[out] outputs Which is a pointer to a vector. The model outputs are filled in the container in sequence.
+  /// \param[in] whether to use data preprocess in model.
   /// \param[in] before CallBack before predict.
   /// \param[in] after CallBack after predict.
   ///
   /// \return Status.
-  inline Status Predict(const std::string &input, std::vector<MSTensor> *outputs,
-                        const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr);
+  Status PredictWithPreprocess(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs,
+                               const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr);
+
+  /// \brief Apply data preprocess if it exists in model.
+  ///
+  /// \param[in] inputs A vector where model inputs are arranged in sequence.
+  /// \param[out] outputs Which is a pointer to a vector. The model outputs are filled in the container in sequence.
+  ///
+  /// \return Status.
+  Status Preprocess(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs);
+
+  /// \brief Check if data preprocess exists in model.
+  /// \return true if data preprocess exists.
+  bool HasPreprocess();
 
   /// \brief Load config file.
   ///
@@ -201,8 +214,6 @@ class MS_API Model {
   std::vector<std::vector<char>> GetOutputTensorNamesChar();
   MSTensor GetOutputByTensorName(const std::vector<char> &tensor_name);
   std::vector<MSTensor> GetOutputsByNodeName(const std::vector<char> &node_name);
-  Status Predict(const std::vector<char> &input, std::vector<MSTensor> *outputs, const MSKernelCallBack &before,
-                 const MSKernelCallBack &after);
 
   std::shared_ptr<ModelImpl> impl_;
 };
@@ -220,10 +231,5 @@ MSTensor Model::GetOutputByTensorName(const std::string &tensor_name) {
 std::vector<MSTensor> Model::GetOutputsByNodeName(const std::string &node_name) {
   return GetOutputsByNodeName(StringToChar(node_name));
 }
-
-Status Model::Predict(const std::string &input, std::vector<MSTensor> *outputs, const MSKernelCallBack &before,
-                      const MSKernelCallBack &after) {
-  return Predict(StringToChar(input), outputs, before, after);
-}
 }  // namespace mindspore
 #endif  // MINDSPORE_INCLUDE_API_MODEL_H
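Note: a minimal caller-side sketch of the new preprocessing API declared above (the `model`, `ret`, and `raw_inputs` variables are placeholders, not part of this commit):

  // Assumes `model` was already Build()-ed from a MindIR file.
  std::vector<mindspore::MSTensor> outputs;
  if (model.HasPreprocess()) {
    // Runs the MindIR-embedded preprocess ops, then inference.
    ret = model.PredictWithPreprocess(raw_inputs, &outputs);
  } else {
    ret = model.Predict(raw_inputs, &outputs);  // no preprocess stored in the model
  }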
@@ -104,6 +104,13 @@ class MS_API MSTensor {
   static inline MSTensor *CreateDevTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape,
                                           const void *data, size_t data_len) noexcept;
 
+  /// \brief Creates a MSTensor object from local image file, must be used in pairs with DestroyTensorPtr.
+  ///
+  /// \param[in] image_file Path of image file.
+  ///
+  /// \return A pointer of MSTensor.
+  static inline MSTensor *CreateImageTensor(const std::string &image_file) noexcept;
+
   /// \brief Create a string type MSTensor object whose data can be accessed by Model only after being copied, must be
   /// used in pair with DestroyTensorPtr.
   ///
@@ -261,6 +268,7 @@ class MS_API MSTensor {
                                    const void *data, size_t data_len) noexcept;
   static MSTensor *CreateDevTensor(const std::vector<char> &name, enum DataType type, const std::vector<int64_t> &shape,
                                    const void *data, size_t data_len) noexcept;
+  static MSTensor *CreateImageTensor(const std::vector<char> &image_file) noexcept;
   static MSTensor *CharStringsToTensor(const std::vector<char> &name, const std::vector<std::vector<char>> &str);
   static std::vector<std::vector<char>> TensorToStringChars(const MSTensor &tensor);
 
@@ -307,6 +315,10 @@ MSTensor *MSTensor::CreateDevTensor(const std::string &name, enum DataType type,
   return CreateDevTensor(StringToChar(name), type, shape, data, data_len);
 }
 
+MSTensor *MSTensor::CreateImageTensor(const std::string &image_file) noexcept {
+  return CreateImageTensor(StringToChar(image_file));
+}
+
 MSTensor *MSTensor::StringsToTensor(const std::string &name, const std::vector<std::string> &str) {
   return CharStringsToTensor(StringToChar(name), VectorStringToChar(str));
 }
@@ -0,0 +1,102 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MINDSPORE_CCSRC_CXX_API_DLUTILS_H_
+#define MINDSPORE_CCSRC_CXX_API_DLUTILS_H_
+#if !defined(_WIN32) && !defined(_WIN64)
+#include <dlfcn.h>
+#include <dirent.h>
+#include <memory>
+#include <string>
+#include <fstream>
+#include "utils/file_utils.h"
+
+namespace mindspore {
+inline Status DLSoPath(std::string *so_path) {
+  if (so_path == nullptr) {
+    return Status(kMEFailed, "Input so_path can not be nullptr.");
+  }
+  Dl_info dl_info;
+  dladdr(reinterpret_cast<void *>(DLSoPath), &dl_info);
+  std::string libmindspore_so = dl_info.dli_fname;
+
+  auto pos = libmindspore_so.find("libmindspore.so");
+  if (pos == std::string::npos) {
+    return Status(kMEFailed, "Could not find libmindspore.so, check path.");
+  }
+
+  std::string parent_dir = libmindspore_so.substr(0, pos) + "../";
+  std::string c_dataengine_so;
+
+  DIR *dir = opendir(parent_dir.c_str());
+  if (dir != nullptr) {
+    // access all the files and directories within directory
+    dirent *ent = readdir(dir);
+    while (ent != nullptr) {
+      if (std::string(ent->d_name).find("_c_dataengine") != std::string::npos) {
+        c_dataengine_so = std::string(ent->d_name);
+        break;
+      }
+      ent = readdir(dir);
+    }
+    closedir(dir);
+  } else {
+    return Status(kMEFailed, "Could not open directory: " + parent_dir);
+  }
+
+  std::string unreal_path = parent_dir + c_dataengine_so;
+  auto realpath = FileUtils::GetRealPath(unreal_path.c_str());
+  if (!realpath.has_value()) {
+    return Status(kMEFailed, "Get c_dataengine_so real path failed, path: " + unreal_path);
+  }
+
+  *so_path = realpath.value();
+  return kSuccess;
+}
+
+inline Status DLSoOpen(const std::string &dl_path, const std::string &func_name, void **handle, void **function) {
+  // do dlopen and export functions from c_dataengine
+  *handle = dlopen(dl_path.c_str(), RTLD_LAZY | RTLD_LOCAL);
+
+  if (*handle == nullptr) {
+    return Status(kMEFailed, "dlopen failed, the pointer[handle] is null.");
+  }
+
+  *function = dlsym(*handle, func_name.c_str());
+  if (*function == nullptr) {
+    return Status(kMEFailed, "Could not find " + func_name + " in " + dl_path);
+  }
+  return kSuccess;
+}
+
+inline void DLSoClose(void *handle) {
+  if (handle != nullptr) {
+    (void)dlclose(handle);
+  }
+}
+
+#define CHECK_FAIL_AND_RELEASE(_s, _handle, _e) \
+  do {                                          \
+    Status __rc = (_s);                         \
+    if (__rc.IsError()) {                       \
+      MS_LOG(ERROR) << (_e);                    \
+      DLSoClose((_handle));                     \
+      return __rc;                              \
+    }                                           \
+  } while (false)
+
+}  // namespace mindspore
+#endif
+#endif  // MINDSPORE_CCSRC_CXX_API_DLUTILS_H_
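Note: the helpers in this new header are used elsewhere in this commit roughly as follows (a condensed sketch; `SomeExport_C` stands in for the real exported symbols such as `ExecuteRun_C` and `ParseMindIRPreprocess_C`):

  std::string so_path;
  Status rc = DLSoPath(&so_path);  // locate _c_dataengine*.so next to libmindspore.so
  CHECK_FAIL_AND_RELEASE(rc, nullptr, "Parse dataengine_so failed: " + rc.GetErrDescription());

  void *handle = nullptr;
  void *function = nullptr;
  rc = DLSoOpen(so_path, "SomeExport_C", &handle, &function);
  CHECK_FAIL_AND_RELEASE(rc, handle, "Parse SomeExport_C failed: " + rc.GetErrDescription());

  auto fn = reinterpret_cast<void (*)(Status *)>(function);  // cast to the exported signature
  Status call_status;
  fn(&call_status);  // call into _c_dataengine with no link-time dependency
  CHECK_FAIL_AND_RELEASE(call_status, handle, "Call failed: " + call_status.GetErrDescription());
  DLSoClose(handle);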
@@ -94,13 +94,29 @@ Status Model::Predict(const std::vector<MSTensor> &inputs, std::vector<MSTensor>
   return impl_->Predict(inputs, outputs);
 }
 
-Status Model::Predict(const std::vector<char> &input, std::vector<MSTensor> *outputs, const MSKernelCallBack &before,
-                      const MSKernelCallBack &after) {
+Status Model::PredictWithPreprocess(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs,
+                                    const MSKernelCallBack &before, const MSKernelCallBack &after) {
   if (impl_ == nullptr) {
     MS_LOG(ERROR) << "Failed because this model has not been built.";
     return kMCFailed;
   }
-  return impl_->Predict(CharToString(input), outputs);
+  return impl_->PredictWithPreprocess(inputs, outputs);
 }
 
+Status Model::Preprocess(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs) {
+  if (impl_ == nullptr) {
+    MS_LOG(ERROR) << "Failed because this model has not been built.";
+    return kMCFailed;
+  }
+  return impl_->Preprocess(inputs, outputs);
+}
+
+bool Model::HasPreprocess() {
+  if (impl_ == nullptr) {
+    MS_LOG(ERROR) << "Failed because this model has not been built.";
+    return false;
+  }
+  return impl_->HasPreprocess();
+}
+
 std::vector<MSTensor> Model::GetInputs() {
@@ -14,9 +14,7 @@
  * limitations under the License.
  */
 #include "cxx_api/model/model_impl.h"
-
-#include <fstream>
-#include "debug/common.h"
+#include "cxx_api/dlutils.h"
 
 namespace mindspore {
 Status ModelImpl::Predict(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs) {
@@ -45,78 +43,52 @@ Status ModelImpl::Predict(const std::vector<MSTensor> &inputs, std::vector<MSTen
   return kSuccess;
 }
 
-Status ModelImpl::Predict(const std::string &input, std::vector<MSTensor> *outputs) {
+bool ModelImpl::HasPreprocess() { return graph_->graph_data_->GetPreprocess().empty() ? false : true; }
+
+Status ModelImpl::Preprocess(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs) {
 #if !defined(_WIN32) && !defined(_WIN64)
-  auto realpath = Common::GetRealPath(input);
-  if (!realpath.has_value()) {
-    MS_LOG(ERROR) << "Get real path failed, path=" << input;
-    return Status(kMEInvalidInput, "Get real path failed, path=" + input);
-  }
+  MS_EXCEPTION_IF_NULL(outputs);
 
-  // Read image file
-  auto file = realpath.value();
-  if (file.empty()) {
-    return Status(kMEInvalidInput, "can not find any input file.");
-  }
-
-  std::ifstream ifs(file, std::ios::in | std::ios::binary);
-  if (!ifs.good()) {
-    return Status(kMEInvalidInput, "File: " + file + " does not exist.");
-  }
-  if (!ifs.is_open()) {
-    return Status(kMEInvalidInput, "File: " + file + " open failed.");
-  }
-
-  auto &io_seekg1 = ifs.seekg(0, std::ios::end);
-  if (!io_seekg1.good() || io_seekg1.fail() || io_seekg1.bad()) {
-    ifs.close();
-    return Status(kMEInvalidInput, "Failed to seekg file: " + file);
-  }
-
-  size_t size = ifs.tellg();
-  MSTensor buffer(file, mindspore::DataType::kNumberTypeUInt8, {static_cast<int64_t>(size)}, nullptr, size);
-
-  auto &io_seekg2 = ifs.seekg(0, std::ios::beg);
-  if (!io_seekg2.good() || io_seekg2.fail() || io_seekg2.bad()) {
-    ifs.close();
-    return Status(kMEInvalidInput, "Failed to seekg file: " + file);
-  }
-
-  auto &io_read = ifs.read(reinterpret_cast<char *>(buffer.MutableData()), size);
-  if (!io_read.good() || io_read.fail() || io_read.bad()) {
-    ifs.close();
-    return Status(kMEInvalidInput, "Failed to read file: " + file);
-  }
-  ifs.close();
+  // Config preprocessor, temporary way to let mindspore.so depend on _c_dataengine
+  std::string dataengine_so_path;
+  Status dlret = DLSoPath(&dataengine_so_path);
+  CHECK_FAIL_AND_RELEASE(dlret, nullptr, "Parse dataengine_so failed: " + dlret.GetErrDescription());
 
   // Run preprocess
-  std::vector<MSTensor> transform_inputs;
-  std::vector<MSTensor> transform_outputs;
-  transform_inputs.emplace_back(std::move(buffer));
-  MS_LOG(DEBUG) << "transform_inputs[0].Shape: " << transform_inputs[0].Shape();
-  auto preprocessor = graph_->graph_data_->GetPreprocess();
-  if (!preprocessor.empty()) {
-    for (auto exes : preprocessor) {
-      MS_EXCEPTION_IF_NULL(exes);
-      Status ret = exes->operator()(transform_inputs, &transform_outputs);
-      if (ret != kSuccess) {
-        MS_LOG(ERROR) << "Run preprocess failed.";
-        return ret;
-      }
-      MS_LOG(DEBUG) << "transform_outputs[0].Shape: " << transform_outputs[0].Shape();
-      transform_inputs = transform_outputs;
-    }
-  } else {
-    std::string msg = "Attempt to predict with data preprocess, but no preprocess operation is defined in MindIR.";
-    MS_LOG(ERROR) << msg;
-    return Status(kMEFailed, msg);
+  if (!HasPreprocess()) {
+    return Status(kMEFailed, "Attempt to predict with data preprocessor, but no preprocessor is defined in MindIR.");
   }
+  std::vector<std::shared_ptr<dataset::Execute>> preprocessor = graph_->graph_data_->GetPreprocess();
+
+  void *handle = nullptr;
+  void *function = nullptr;
+  dlret = DLSoOpen(dataengine_so_path, "ExecuteRun_C", &handle, &function);
+  CHECK_FAIL_AND_RELEASE(dlret, handle, "Parse ExecuteRun_C failed: " + dlret.GetErrDescription());
+
+  auto ExecuteRun =
+    (void (*)(const std::vector<std::shared_ptr<dataset::Execute>> &, const std::vector<mindspore::MSTensor> &,
+              std::vector<mindspore::MSTensor> *, Status *))(function);
+  ExecuteRun(preprocessor, inputs, outputs, &dlret);
+  CHECK_FAIL_AND_RELEASE(dlret, handle, "Run preprocess failed: " + dlret.GetErrDescription());
+  DLSoClose(handle);
+  return kSuccess;
+#else
+  MS_LOG(ERROR) << "Data preprocess is not supported on Windows yet.";
+  return Status(kMEFailed, "Data preprocess is not supported on Windows yet.");
+#endif
+}
+
+Status ModelImpl::PredictWithPreprocess(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs) {
+#if !defined(_WIN32) && !defined(_WIN64)
+  // Run preprocess
+  std::vector<MSTensor> preprocess_outputs;
+  Status ret = Preprocess(inputs, &preprocess_outputs);
+  if (ret != kSuccess) {
+    return ret;
+  }
 
   // Run prediction
-  Status ret = Predict(transform_outputs, outputs);
+  ret = Predict(preprocess_outputs, outputs);
   if (ret != kSuccess) {
-    MS_LOG(ERROR) << ret.GetErrDescription();
+    MS_LOG(ERROR) << "Run predict failed: " << ret.GetErrDescription();
     return ret;
   }
   return kSuccess;
@@ -39,13 +39,17 @@ class ModelImpl {
 
   virtual Status Predict(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs);
 
-  virtual Status Predict(const std::string &input, std::vector<MSTensor> *outputs);
+  virtual Status PredictWithPreprocess(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs);
 
   virtual std::vector<MSTensor> GetInputs() = 0;
   virtual std::vector<MSTensor> GetOutputs() = 0;
 
   virtual bool CheckModelSupport(enum ModelType model_type) { return false; }
 
+  virtual Status Preprocess(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs);
+
+  virtual bool HasPreprocess();
+
 protected:
   FuncGraphPtr GetFuncGraph() const {
     if (graph_->ModelType() != ModelType::kMindIR) {
@@ -20,6 +20,7 @@
 #include "utils/log_adapter.h"
 #include "mindspore/core/load_mindir/load_model.h"
 #if !defined(_WIN32) && !defined(_WIN64)
+#include "cxx_api/dlutils.h"
 #include "minddata/dataset/engine/serdes.h"
 #include "minddata/dataset/include/dataset/execute.h"
 #endif
@@ -160,9 +161,8 @@ Status Serialization::Load(const std::vector<char> &file, ModelType model_type,
                            const std::vector<char> &dec_mode) {
   std::stringstream err_msg;
   if (graph == nullptr) {
-    err_msg << "Output args graph is nullptr.";
-    MS_LOG(ERROR) << err_msg.str();
-    return Status(kMEInvalidInput, err_msg.str());
+    MS_LOG(ERROR) << "Output args graph is nullptr.";
+    return Status(kMEInvalidInput, "Output args graph is nullptr.");
   }
 
   std::string file_path;
@@ -193,18 +193,28 @@ Status Serialization::Load(const std::vector<char> &file, ModelType model_type,
   }
   auto graph_data = std::make_shared<Graph::GraphData>(anf_graph, kMindIR);
 #if !defined(_WIN32) && !defined(_WIN64)
   // Config preprocessor, temporary way to let mindspore.so depend on _c_dataengine
   std::string preprocessor = LoadPreprocess(file_path);
   if (!preprocessor.empty()) {
+    std::string dataengine_so_path;
+    Status dlret = DLSoPath(&dataengine_so_path);
+    CHECK_FAIL_AND_RELEASE(dlret, nullptr, "Parse dataengine_so failed: " + dlret.GetErrDescription());
+
+    void *handle = nullptr;
+    void *function = nullptr;
+    dlret = DLSoOpen(dataengine_so_path, "ParseMindIRPreprocess_C", &handle, &function);
+    CHECK_FAIL_AND_RELEASE(dlret, handle, "Parse ParseMindIRPreprocess_C failed: " + dlret.GetErrDescription());
+
+    auto ParseMindIRPreprocessFun =
+      (void (*)(const std::string &, const std::string &, std::vector<std::shared_ptr<mindspore::dataset::Execute>> *,
+                Status *))(function);
+
     std::vector<std::shared_ptr<dataset::Execute>> data_graph;
-    status = dataset::Serdes::ParseMindIRPreprocess(preprocessor, "image", &data_graph);
-    if (status != kSuccess) {
-      MS_LOG(ERROR) << status.GetErrDescription();
-      return status;
-    }
+    ParseMindIRPreprocessFun(preprocessor, "image", &data_graph, &dlret);
+    CHECK_FAIL_AND_RELEASE(dlret, handle, "Load preprocess failed: " + dlret.GetErrDescription());
+    DLSoClose(handle);
     if (!data_graph.empty()) {
       graph_data->SetPreprocess(data_graph);
     } else {
       MS_LOG(WARNING) << "Load preprocess failed, no data preprocess operations found in MindIR.";
     }
   }
 #endif
@@ -230,9 +240,8 @@ Status Serialization::Load(const std::vector<std::vector<char>> &files, ModelTyp
                            std::vector<Graph> *graphs, const Key &dec_key, const std::vector<char> &dec_mode) {
   std::stringstream err_msg;
   if (graphs == nullptr) {
-    err_msg << "Output args graph is nullptr.";
-    MS_LOG(ERROR) << err_msg.str();
-    return Status(kMEInvalidInput, err_msg.str());
+    MS_LOG(ERROR) << "Output args graph is nullptr.";
+    return Status(kMEInvalidInput, "Output args graph is nullptr.");
   }
 
   if (files.size() == 1) {
@@ -266,6 +275,21 @@ Status Serialization::Load(const std::vector<std::vector<char>> &files, ModelTyp
     MS_LOG(ERROR) << err_msg.str();
     return Status(kMEInvalidInput, err_msg.str());
   }
+#if !defined(_WIN32) && !defined(_WIN64)
+  // Dataset so loading
+  std::string dataengine_so_path;
+  Status dlret = DLSoPath(&dataengine_so_path);
+  CHECK_FAIL_AND_RELEASE(dlret, nullptr, "Parse dataengine_so failed: " + dlret.GetErrDescription());
+
+  void *handle = nullptr;
+  void *function = nullptr;
+  dlret = DLSoOpen(dataengine_so_path, "ParseMindIRPreprocess_C", &handle, &function);
+  CHECK_FAIL_AND_RELEASE(dlret, handle, "Parse ParseMindIRPreprocess_C failed: " + dlret.GetErrDescription());
+
+  auto ParseMindIRPreprocessFun =
+    (void (*)(const std::string &, const std::string &, std::vector<std::shared_ptr<mindspore::dataset::Execute>> *,
+              Status *))(function);
+#endif
   std::vector<Graph> results;
   for (size_t i = 0; i < anf_graphs.size(); ++i) {
     if (anf_graphs[i] == nullptr) {
@@ -278,25 +302,25 @@ Status Serialization::Load(const std::vector<std::vector<char>> &files, ModelTyp
       return Status(kMEInvalidInput, err_msg.str());
     }
     auto graph_data = std::make_shared<Graph::GraphData>(anf_graphs[i], kMindIR);
 
 #if !defined(_WIN32) && !defined(_WIN64)
     // Config preprocessor, temporary way to let mindspore.so depend on _c_dataengine
     std::string preprocessor = LoadPreprocess(files_path[i]);
     if (!preprocessor.empty()) {
       std::vector<std::shared_ptr<dataset::Execute>> data_graph;
-      auto status = dataset::Serdes::ParseMindIRPreprocess(preprocessor, "image", &data_graph);
-      if (status != kSuccess) {
-        MS_LOG(ERROR) << status.GetErrDescription();
-        return status;
-      }
+      ParseMindIRPreprocessFun(preprocessor, "image", &data_graph, &dlret);
+      CHECK_FAIL_AND_RELEASE(dlret, handle, "Load preprocess failed: " + dlret.GetErrDescription());
       if (!data_graph.empty()) {
         graph_data->SetPreprocess(data_graph);
       } else {
         MS_LOG(WARNING) << "Load preprocess failed, no data preprocess operations found in MindIR.";
       }
     }
 #endif
     results.emplace_back(graph_data);
   }
 
+#if !defined(_WIN32) && !defined(_WIN64)
+  // Dataset so release
+  DLSoClose(handle);
+#endif
   *graphs = std::move(results);
   return kSuccess;
 }
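Note: with these changes, loading and running a MindIR that carries a preprocess graph looks roughly like this from the public cxx_api (a sketch; the file name and device are placeholders):

  mindspore::Graph graph;
  auto st = mindspore::Serialization::Load("model_with_preprocess.mindir", mindspore::ModelType::kMindIR, &graph);
  // The preprocess pipeline parsed above travels inside graph_data via SetPreprocess().

  auto context = std::make_shared<mindspore::Context>();
  context->MutableDeviceInfo().push_back(std::make_shared<mindspore::Ascend310DeviceInfo>());

  mindspore::Model model;
  st = model.Build(mindspore::GraphCell(graph), context);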
@@ -14,10 +14,12 @@
  * limitations under the License.
  */
 #include "include/api/types.h"
+#include <fstream>
 #include <numeric>
 #include "securec/include/securec.h"
 #include "mindspore/core/ir/api_tensor_impl.h"
 #include "mindspore/core/utils/convert_utils_base.h"
+#include "utils/file_utils.h"
 
 namespace mindspore {
 class Buffer::Impl {
@@ -184,6 +186,69 @@ MSTensor *MSTensor::CreateDevTensor(const std::vector<char> &name, enum DataType
   }
 }
 
+MSTensor *MSTensor::CreateImageTensor(const std::vector<char> &image_file) noexcept {
+  std::string image_file_str = CharToString(image_file);
+
+  try {
+    auto realpath = FileUtils::GetRealPath(image_file_str.c_str());
+    if (!realpath.has_value()) {
+      MS_LOG(ERROR) << "Get real path failed, path=" << image_file_str;
+      return nullptr;
+    }
+
+    // Read image file
+    auto file = realpath.value();
+    if (file.empty()) {
+      MS_LOG(ERROR) << "can not find any input file.";
+      return nullptr;
+    }
+
+    std::ifstream ifs(file, std::ios::in | std::ios::binary);
+    if (!ifs.good()) {
+      MS_LOG(ERROR) << "File: " + file + " does not exist.";
+      return nullptr;
+    }
+    if (!ifs.is_open()) {
+      MS_LOG(ERROR) << "File: " + file + " open failed.";
+      return nullptr;
+    }
+
+    auto &io_seekg1 = ifs.seekg(0, std::ios::end);
+    if (!io_seekg1.good() || io_seekg1.fail() || io_seekg1.bad()) {
+      ifs.close();
+      MS_LOG(ERROR) << "Failed to seekg file: " + file;
+      return nullptr;
+    }
+
+    size_t size = ifs.tellg();
+    MSTensor *ret =
+      new MSTensor(file, mindspore::DataType::kNumberTypeUInt8, {static_cast<int64_t>(size)}, nullptr, size);
+
+    auto &io_seekg2 = ifs.seekg(0, std::ios::beg);
+    if (!io_seekg2.good() || io_seekg2.fail() || io_seekg2.bad()) {
+      ifs.close();
+      MS_LOG(ERROR) << "Failed to seekg file: " + file;
+      return nullptr;
+    }
+
+    auto &io_read = ifs.read(reinterpret_cast<char *>(ret->MutableData()), size);
+    if (!io_read.good() || io_read.fail() || io_read.bad()) {
+      ifs.close();
+      MS_LOG(ERROR) << "Failed to read file: " + file;
+      return nullptr;
+    }
+    ifs.close();
+
+    return ret;
+  } catch (const std::bad_alloc &) {
+    MS_LOG(ERROR) << "Malloc memory failed.";
+    return nullptr;
+  } catch (...) {
+    MS_LOG(ERROR) << "Unknown error occurred.";
+    return nullptr;
+  }
+}
+
 MSTensor *MSTensor::CharStringsToTensor(const std::vector<char> &name, const std::vector<std::vector<char>> &str) {
   // num(4 bytes) + offset1(4 bytes) + offset2(4 bytes) + ... + data1(str1.len) + data2(str2.len) + ...
   // str1.len() = offset2 - offset1
@@ -665,5 +665,41 @@ Status Execute::DeviceMemoryRelease() {
   return Status::OK();
 }
 
+Status Execute::Run(const std::vector<std::shared_ptr<dataset::Execute>> &data_graph,
+                    const std::vector<mindspore::MSTensor> &inputs, std::vector<mindspore::MSTensor> *outputs) {
+  std::vector<MSTensor> transform_inputs = inputs;
+  std::vector<MSTensor> transform_outputs;
+  if (!data_graph.empty()) {
+    for (auto exes : data_graph) {
+      CHECK_FAIL_RETURN_UNEXPECTED(exes != nullptr, "Given execute object is null.");
+      Status ret = exes->operator()(transform_inputs, &transform_outputs);
+      if (ret != kSuccess) {
+        MS_LOG(ERROR) << "Run preprocess failed: " << ret.GetErrDescription();
+        return ret;
+      }
+      MS_LOG(DEBUG) << "transform_outputs[0].Shape: " << transform_outputs[0].Shape();
+      transform_inputs = transform_outputs;
+    }
+    *outputs = std::move(transform_outputs);
+  } else {
+    std::string msg = "The set of Executors can not be empty.";
+    MS_LOG(ERROR) << msg;
+    RETURN_STATUS_UNEXPECTED(msg);
+  }
+  return Status::OK();
+}
+
+// In the current stage, there is a cyclic dependency between libmindspore.so and c_dataengine.so,
+// so we define a C function here and dlopen it from libmindspore.so to avoid linking explicitly;
+// this will be fixed after decoupling libmindspore.so into multiple submodules.
+extern "C" {
+// ExecuteRun_C has C-linkage specified, but returns user-defined type 'mindspore::Status' which is incompatible with C
+void ExecuteRun_C(const std::vector<std::shared_ptr<dataset::Execute>> &data_graph,
+                  std::vector<mindspore::MSTensor> &inputs, std::vector<mindspore::MSTensor> *outputs, Status *s) {
+  Status ret = Execute::Run(data_graph, inputs, outputs);
+  *s = Status(ret);
+}
+}
+
 }  // namespace dataset
 }  // namespace mindspore
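Note: the Status out-parameter in ExecuteRun_C exists because a symbol with C linkage cannot portably return a C++ class by value. The generic shape of the pattern (names below are illustrative only, not MindSpore API):

  // Library side: C-linkage trampoline, status returned via out-parameter.
  extern "C" void DoWork_C(int x, Status *s) { *s = DoWorkImpl(x); }

  // Consumer side: resolve with dlsym and cast back to the exact signature.
  auto DoWork = reinterpret_cast<void (*)(int, Status *)>(dlsym(handle, "DoWork_C"));
  Status rc;
  DoWork(42, &rc);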
@@ -376,5 +376,18 @@ Status Serdes::ParseMindIRPreprocess(const std::string &dataset_json, const std:
   return Status::OK();
 }
 
+// In the current stage, there is a cyclic dependency between libmindspore.so and c_dataengine.so,
+// so we define a C function here and dlopen it from libmindspore.so to avoid linking explicitly;
+// this will be fixed after decoupling libmindspore.so into multiple submodules.
+extern "C" {
+// ParseMindIRPreprocess_C has C-linkage specified, but returns user-defined type 'mindspore::Status'
+// which is incompatible with C
+void ParseMindIRPreprocess_C(const std::string &dataset_json, const std::string &process_column,
+                             std::vector<std::shared_ptr<mindspore::dataset::Execute>> *data_graph, Status *s) {
+  Status ret = Serdes::ParseMindIRPreprocess(dataset_json, process_column, data_graph);
+  *s = Status(ret);
+}
+}
+
 }  // namespace dataset
 }  // namespace mindspore
@@ -106,6 +106,10 @@ class Execute {
   /// \return Status error code, returns OK if no error encountered.
   Status operator()(const std::vector<mindspore::MSTensor> &input_tensor_list, std::vector<mindspore::MSTensor> *out);
 
+  /// \brief Given a set of Executes, run them.
+  static Status Run(const std::vector<std::shared_ptr<dataset::Execute>> &data_graph,
+                    const std::vector<mindspore::MSTensor> &inputs, std::vector<mindspore::MSTensor> *outputs);
+
   /// \brief The function to release device memory on Ascend310.
   Status DeviceMemoryRelease();
@@ -109,12 +109,22 @@ Status Model::Predict(const std::vector<MSTensor> &inputs, std::vector<MSTensor>
   return impl_->Predict(inputs, outputs, before, after);
 }
 
-Status Model::Predict(const std::vector<char> &input, std::vector<MSTensor> *outputs, const MSKernelCallBack &before,
-                      const MSKernelCallBack &after) {
+Status Model::PredictWithPreprocess(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs,
+                                    const MSKernelCallBack &before, const MSKernelCallBack &after) {
   MS_LOG(ERROR) << "Unsupported Feature.";
   return kLiteNotSupport;
 }
 
+Status Model::Preprocess(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs) {
+  MS_LOG(ERROR) << "Unsupported Feature.";
+  return kLiteNotSupport;
+}
+
+bool Model::HasPreprocess() {
+  MS_LOG(ERROR) << "Unsupported Feature.";
+  return false;
+}
+
 Model::Model() : impl_(nullptr) {}
 
 Model::~Model() {}
@@ -13,4 +13,4 @@ file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*)
 add_executable(main main.cc utils.cc)
 target_link_libraries(main ${MS_LIB} ${MD_LIB} gflags)
 add_executable(main_preprocess main_preprocess.cc utils.cc)
-target_link_libraries(main_preprocess ${MS_LIB} ${MD_LIB} gflags)
+target_link_libraries(main_preprocess ${MS_LIB} gflags)
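Note: dropping ${MD_LIB} here is the user-visible effect of the fix: main_preprocess no longer links _c_dataengine at build time, since libmindspore now dlopen()s it on demand. A new consumer would link the same way (sketch; my_app is a placeholder target):

  add_executable(my_app my_app.cc)
  target_link_libraries(my_app ${MS_LIB} gflags)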
@@ -63,14 +63,58 @@ int main(int argc, char **argv) {
     return 1;
   }
 
-  std::vector<MSTensor> outputs;
-  ret = model.Predict(FLAGS_image_path, &outputs);
+  std::cout << "Check if data preprocess exists: " << model.HasPreprocess() << std::endl;
+
+  // way 1, construct a common MSTensor
+  std::vector<MSTensor> inputs1 = {ReadFileToTensor(FLAGS_image_path)};
+  std::vector<MSTensor> outputs1;
+
+  ret = model.PredictWithPreprocess(inputs1, &outputs1);
   if (ret.IsError()) {
     std::cout << "ERROR: Predict failed." << std::endl;
     return 1;
   }
 
-  auto shape = outputs[0].Shape();
+  std::ofstream o1("result1.txt", std::ios::out);
+  o1.write(reinterpret_cast<const char *>(outputs1[0].MutableData()), std::streamsize(outputs1[0].DataSize()));
+
+  // way 2, construct a pointer of MSTensor, be careful to destroy it
+  MSTensor *tensor = MSTensor::CreateImageTensor(FLAGS_image_path);
+  std::vector<MSTensor> inputs2 = {*tensor};
+  MSTensor::DestroyTensorPtr(tensor);
+  std::vector<MSTensor> outputs2;
+
+  ret = model.PredictWithPreprocess(inputs2, &outputs2);
+  if (ret.IsError()) {
+    std::cout << "ERROR: Predict failed." << std::endl;
+    return 1;
+  }
+
+  std::ofstream o2("result2.txt", std::ios::out);
+  o2.write(reinterpret_cast<const char *>(outputs2[0].MutableData()), std::streamsize(outputs2[0].DataSize()));
+
+  // way 3, split preprocess and predict
+  std::vector<MSTensor> inputs3 = {ReadFileToTensor(FLAGS_image_path)};
+  std::vector<MSTensor> outputs3;
+
+  ret = model.Preprocess(inputs3, &outputs3);
+  if (ret.IsError()) {
+    std::cout << "ERROR: Preprocess failed." << std::endl;
+    return 1;
+  }
+
+  std::vector<MSTensor> outputs4;
+  ret = model.Predict(outputs3, &outputs4);
+  if (ret.IsError()) {
+    std::cout << "ERROR: Predict failed." << std::endl;
+    return 1;
+  }
+
+  std::ofstream o3("result3.txt", std::ios::out);
+  o3.write(reinterpret_cast<const char *>(outputs4[0].MutableData()), std::streamsize(outputs4[0].DataSize()));
+
+  // check shape
+  auto shape = outputs1[0].Shape();
   std::cout << "Output Shape: " << std::endl;
   for (auto s : shape) {
     std::cout << s << ", ";