diff --git a/include/api/model.h b/include/api/model.h index 22c8d2940ff..ac1f9e2428c 100644 --- a/include/api/model.h +++ b/include/api/model.h @@ -75,16 +75,29 @@ class MS_API Model { Status Predict(const std::vector &inputs, std::vector *outputs, const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr); - /// \brief Inference model, only for cv model inference. + /// \brief Inference model with preprocess in model. /// - /// \param[in] inputs A string represents the file path of input image. + /// \param[in] inputs A vector where model inputs are arranged in sequence. /// \param[out] outputs Which is a pointer to a vector. The model outputs are filled in the container in sequence. + /// \note Data preprocess defined in the model, if any, is applied to the inputs before inference. /// \param[in] before CallBack before predict. /// \param[in] after CallBack after predict. /// /// \return Status. - inline Status Predict(const std::string &input, std::vector *outputs, - const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr); + Status PredictWithPreprocess(const std::vector &inputs, std::vector *outputs, + const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr); + + /// \brief Apply data preprocess if it exists in model. + /// + /// \param[in] inputs A vector where model inputs are arranged in sequence. + /// \param[out] outputs Which is a pointer to a vector. The model outputs are filled in the container in sequence. + /// + /// \return Status. + Status Preprocess(const std::vector &inputs, std::vector *outputs); + + /// \brief Check if data preprocess exists in model. + /// \return true if data preprocess exists. + bool HasPreprocess(); /// \brief Load config file. 
/// @@ -201,8 +214,6 @@ class MS_API Model { std::vector> GetOutputTensorNamesChar(); MSTensor GetOutputByTensorName(const std::vector &tensor_name); std::vector GetOutputsByNodeName(const std::vector &node_name); - Status Predict(const std::vector &input, std::vector *outputs, const MSKernelCallBack &before, - const MSKernelCallBack &after); std::shared_ptr impl_; }; @@ -220,10 +231,5 @@ MSTensor Model::GetOutputByTensorName(const std::string &tensor_name) { std::vector Model::GetOutputsByNodeName(const std::string &node_name) { return GetOutputsByNodeName(StringToChar(node_name)); } - -Status Model::Predict(const std::string &input, std::vector *outputs, const MSKernelCallBack &before, - const MSKernelCallBack &after) { - return Predict(StringToChar(input), outputs, before, after); -} } // namespace mindspore #endif // MINDSPORE_INCLUDE_API_MODEL_H diff --git a/include/api/types.h b/include/api/types.h index c652b9e5c4f..26e2e8f49dd 100644 --- a/include/api/types.h +++ b/include/api/types.h @@ -104,6 +104,13 @@ class MS_API MSTensor { static inline MSTensor *CreateDevTensor(const std::string &name, DataType type, const std::vector &shape, const void *data, size_t data_len) noexcept; + /// \brief Creates a MSTensor object from local image file, must be used in pairs with DestroyTensorPtr. + /// + /// \param[in] image_file Path of image file. + /// + /// \return A pointer of MSTensor. + static inline MSTensor *CreateImageTensor(const std::string &image_file) noexcept; + /// \brief Create a string type MSTensor object whose data can be accessed by Model only after being copied, must be /// used in pair with DestroyTensorPtr. 
/// @@ -261,6 +268,7 @@ class MS_API MSTensor { const void *data, size_t data_len) noexcept; static MSTensor *CreateDevTensor(const std::vector &name, enum DataType type, const std::vector &shape, const void *data, size_t data_len) noexcept; + static MSTensor *CreateImageTensor(const std::vector &image_file) noexcept; static MSTensor *CharStringsToTensor(const std::vector &name, const std::vector> &str); static std::vector> TensorToStringChars(const MSTensor &tensor); @@ -307,6 +315,10 @@ MSTensor *MSTensor::CreateDevTensor(const std::string &name, enum DataType type, return CreateDevTensor(StringToChar(name), type, shape, data, data_len); } +MSTensor *MSTensor::CreateImageTensor(const std::string &image_file) noexcept { + return CreateImageTensor(StringToChar(image_file)); +} + MSTensor *MSTensor::StringsToTensor(const std::string &name, const std::vector &str) { return CharStringsToTensor(StringToChar(name), VectorStringToChar(str)); } diff --git a/mindspore/ccsrc/cxx_api/dlutils.h b/mindspore/ccsrc/cxx_api/dlutils.h new file mode 100644 index 00000000000..e1aea4ee62a --- /dev/null +++ b/mindspore/ccsrc/cxx_api/dlutils.h @@ -0,0 +1,102 @@ +/** + * Copyright 2021 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef MINDSPORE_CCSRC_CXX_API_DLUTILS_H_ +#define MINDSPORE_CCSRC_CXX_API_DLUTILS_H_ +#if !defined(_WIN32) && !defined(_WIN64) +#include +#include +#include +#include +#include +#include "utils/file_utils.h" + +namespace mindspore { +inline Status DLSoPath(std::string *so_path) { + if (so_path == nullptr) { + return Status(kMEFailed, "Input so_path can not be nullptr."); + } + Dl_info dl_info; + dladdr(reinterpret_cast(DLSoPath), &dl_info); + std::string libmindspore_so = dl_info.dli_fname; + + auto pos = libmindspore_so.find("libmindspore.so"); + if (pos == std::string::npos) { + return Status(kMEFailed, "Could not find libmindspore.so, check path."); + } + + std::string parent_dir = libmindspore_so.substr(0, pos) + "../"; + std::string c_dataengine_so; + + DIR *dir = opendir(parent_dir.c_str()); + if (dir != nullptr) { + // access all the files and directories within directory + dirent *ent = readdir(dir); + while (ent != nullptr) { + if (std::string(ent->d_name).find("_c_dataengine") != std::string::npos) { + c_dataengine_so = std::string(ent->d_name); + break; + } + ent = readdir(dir); + } + closedir(dir); + } else { + return Status(kMEFailed, "Could not open directory: " + parent_dir); + } + + std::string unreal_path = parent_dir + c_dataengine_so; + auto realpath = FileUtils::GetRealPath(unreal_path.c_str()); + if (!realpath.has_value()) { + return Status(kMEFailed, "Get c_dataengine_so real path failed, path: " + unreal_path); + } + + *so_path = realpath.value(); + return kSuccess; +} + +inline Status DLSoOpen(const std::string &dl_path, const std::string &func_name, void **handle, void **function) { + // do dlopen and export functions from c_dataengine + *handle = dlopen(dl_path.c_str(), RTLD_LAZY | RTLD_LOCAL); + + if (*handle == nullptr) { + return Status(kMEFailed, "dlopen failed, the pointer[handle] is null."); + } + + *function = dlsym(*handle, func_name.c_str()); + if (*function == nullptr) { + return Status(kMEFailed, "Could not find " + 
func_name + " in " + dl_path); + } + return kSuccess; +} + +inline void DLSoClose(void *handle) { + if (handle != nullptr) { + (void)dlclose(handle); + } +} + +#define CHECK_FAIL_AND_RELEASE(_s, _handle, _e) \ + do { \ + Status __rc = (_s); \ + if (__rc.IsError()) { \ + MS_LOG(ERROR) << (_e); \ + DLSoClose((_handle)); \ + return __rc; \ + } \ + } while (false) + +} // namespace mindspore +#endif +#endif // MINDSPORE_CCSRC_CXX_API_DLUTILS_H_ diff --git a/mindspore/ccsrc/cxx_api/model/model.cc b/mindspore/ccsrc/cxx_api/model/model.cc index a02bb176294..84905a7c63f 100644 --- a/mindspore/ccsrc/cxx_api/model/model.cc +++ b/mindspore/ccsrc/cxx_api/model/model.cc @@ -94,13 +94,29 @@ Status Model::Predict(const std::vector &inputs, std::vector return impl_->Predict(inputs, outputs); } -Status Model::Predict(const std::vector &input, std::vector *outputs, const MSKernelCallBack &before, - const MSKernelCallBack &after) { +Status Model::PredictWithPreprocess(const std::vector &inputs, std::vector *outputs, + const MSKernelCallBack &before, const MSKernelCallBack &after) { if (impl_ == nullptr) { MS_LOG(ERROR) << "Failed because this model has not been built."; return kMCFailed; } - return impl_->Predict(CharToString(input), outputs); + return impl_->PredictWithPreprocess(inputs, outputs); +} + +Status Model::Preprocess(const std::vector &inputs, std::vector *outputs) { + if (impl_ == nullptr) { + MS_LOG(ERROR) << "Failed because this model has not been built."; + return kMCFailed; + } + return impl_->Preprocess(inputs, outputs); +} + +bool Model::HasPreprocess() { + if (impl_ == nullptr) { + MS_LOG(ERROR) << "Failed because this model has not been built."; + return false; + } + return impl_->HasPreprocess(); } std::vector Model::GetInputs() { diff --git a/mindspore/ccsrc/cxx_api/model/model_impl.cc b/mindspore/ccsrc/cxx_api/model/model_impl.cc index 07e8054bc7a..7f93601a981 100644 --- a/mindspore/ccsrc/cxx_api/model/model_impl.cc +++ 
b/mindspore/ccsrc/cxx_api/model/model_impl.cc @@ -14,9 +14,7 @@ * limitations under the License. */ #include "cxx_api/model/model_impl.h" - -#include -#include "debug/common.h" +#include "cxx_api/dlutils.h" namespace mindspore { Status ModelImpl::Predict(const std::vector &inputs, std::vector *outputs) { @@ -45,78 +43,52 @@ Status ModelImpl::Predict(const std::vector &inputs, std::vector *outputs) { +bool ModelImpl::HasPreprocess() { return graph_->graph_data_->GetPreprocess().empty() ? false : true; } + +Status ModelImpl::Preprocess(const std::vector &inputs, std::vector *outputs) { #if !defined(_WIN32) && !defined(_WIN64) - auto realpath = Common::GetRealPath(input); - if (!realpath.has_value()) { - MS_LOG(ERROR) << "Get real path failed, path=" << input; - return Status(kMEInvalidInput, "Get real path failed, path=" + input); - } - MS_EXCEPTION_IF_NULL(outputs); - - // Read image file - auto file = realpath.value(); - if (file.empty()) { - return Status(kMEInvalidInput, "can not find any input file."); - } - - std::ifstream ifs(file, std::ios::in | std::ios::binary); - if (!ifs.good()) { - return Status(kMEInvalidInput, "File: " + file + " does not exist."); - } - if (!ifs.is_open()) { - return Status(kMEInvalidInput, "File: " + file + " open failed."); - } - - auto &io_seekg1 = ifs.seekg(0, std::ios::end); - if (!io_seekg1.good() || io_seekg1.fail() || io_seekg1.bad()) { - ifs.close(); - return Status(kMEInvalidInput, "Failed to seekg file: " + file); - } - - size_t size = ifs.tellg(); - MSTensor buffer(file, mindspore::DataType::kNumberTypeUInt8, {static_cast(size)}, nullptr, size); - - auto &io_seekg2 = ifs.seekg(0, std::ios::beg); - if (!io_seekg2.good() || io_seekg2.fail() || io_seekg2.bad()) { - ifs.close(); - return Status(kMEInvalidInput, "Failed to seekg file: " + file); - } - - auto &io_read = ifs.read(reinterpret_cast(buffer.MutableData()), size); - if (!io_read.good() || io_read.fail() || io_read.bad()) { - ifs.close(); - return 
Status(kMEInvalidInput, "Failed to read file: " + file); - } - ifs.close(); + // Config preprocessor, temporary way to let mindspore.so depends on _c_dataengine + std::string dataengine_so_path; + Status dlret = DLSoPath(&dataengine_so_path); + CHECK_FAIL_AND_RELEASE(dlret, nullptr, "Parse dataengine_so failed: " + dlret.GetErrDescription()); // Run preprocess - std::vector transform_inputs; - std::vector transform_outputs; - transform_inputs.emplace_back(std::move(buffer)); - MS_LOG(DEBUG) << "transform_inputs[0].Shape: " << transform_inputs[0].Shape(); - auto preprocessor = graph_->graph_data_->GetPreprocess(); - if (!preprocessor.empty()) { - for (auto exes : preprocessor) { - MS_EXCEPTION_IF_NULL(exes); - Status ret = exes->operator()(transform_inputs, &transform_outputs); - if (ret != kSuccess) { - MS_LOG(ERROR) << "Run preprocess failed."; - return ret; - } - MS_LOG(DEBUG) << "transform_outputs[0].Shape: " << transform_outputs[0].Shape(); - transform_inputs = transform_outputs; - } - } else { - std::string msg = "Attempt to predict with data preprocess, but no preprocess operation is defined in MindIR."; - MS_LOG(ERROR) << msg; - return Status(kMEFailed, msg); + if (!HasPreprocess()) { + return Status(kMEFailed, "Attempt to predict with data preprocessor, but no preprocessor is defined in MindIR."); + } + std::vector> preprocessor = graph_->graph_data_->GetPreprocess(); + + void *handle = nullptr; + void *function = nullptr; + dlret = DLSoOpen(dataengine_so_path, "ExecuteRun_C", &handle, &function); + CHECK_FAIL_AND_RELEASE(dlret, handle, "Parse ExecuteRun_C failed: " + dlret.GetErrDescription()); + + auto ExecuteRun = + (void (*)(const std::vector> &, const std::vector &, + std::vector *, Status *))(function); + ExecuteRun(preprocessor, inputs, outputs, &dlret); + CHECK_FAIL_AND_RELEASE(dlret, handle, "Run preprocess failed: " + dlret.GetErrDescription()); + DLSoClose(handle); + return kSuccess; +#else + MS_LOG(ERROR) << "Data preprocess is not supported on 
Windows yet."; + return Status(kMEFailed, "Data preprocess is not supported on Windows yet."); +#endif +} + +Status ModelImpl::PredictWithPreprocess(const std::vector &inputs, std::vector *outputs) { +#if !defined(_WIN32) && !defined(_WIN64) + // Run preprocess + std::vector preprocess_outputs; + Status ret = Preprocess(inputs, &preprocess_outputs); + if (ret != kSuccess) { + return ret; } // Run prediction - Status ret = Predict(transform_outputs, outputs); + ret = Predict(preprocess_outputs, outputs); if (ret != kSuccess) { - MS_LOG(ERROR) << ret.GetErrDescription(); + MS_LOG(ERROR) << "Run predict failed: " << ret.GetErrDescription(); return ret; } return kSuccess; diff --git a/mindspore/ccsrc/cxx_api/model/model_impl.h b/mindspore/ccsrc/cxx_api/model/model_impl.h index b0329eb422d..5de592d557b 100644 --- a/mindspore/ccsrc/cxx_api/model/model_impl.h +++ b/mindspore/ccsrc/cxx_api/model/model_impl.h @@ -39,13 +39,17 @@ class ModelImpl { virtual Status Predict(const std::vector &inputs, std::vector *outputs); - virtual Status Predict(const std::string &input, std::vector *outputs); + virtual Status PredictWithPreprocess(const std::vector &inputs, std::vector *outputs); virtual std::vector GetInputs() = 0; virtual std::vector GetOutputs() = 0; virtual bool CheckModelSupport(enum ModelType model_type) { return false; } + virtual Status Preprocess(const std::vector &inputs, std::vector *outputs); + + virtual bool HasPreprocess(); + protected: FuncGraphPtr GetFuncGraph() const { if (graph_->ModelType() != ModelType::kMindIR) { diff --git a/mindspore/ccsrc/cxx_api/serialization.cc b/mindspore/ccsrc/cxx_api/serialization.cc index b6c9b72351e..31fd54df0f3 100644 --- a/mindspore/ccsrc/cxx_api/serialization.cc +++ b/mindspore/ccsrc/cxx_api/serialization.cc @@ -20,6 +20,7 @@ #include "utils/log_adapter.h" #include "mindspore/core/load_mindir/load_model.h" #if !defined(_WIN32) && !defined(_WIN64) +#include "cxx_api/dlutils.h" #include "minddata/dataset/engine/serdes.h" 
#include "minddata/dataset/include/dataset/execute.h" #endif @@ -160,9 +161,8 @@ Status Serialization::Load(const std::vector &file, ModelType model_type, const std::vector &dec_mode) { std::stringstream err_msg; if (graph == nullptr) { - err_msg << "Output args graph is nullptr."; - MS_LOG(ERROR) << err_msg.str(); - return Status(kMEInvalidInput, err_msg.str()); + MS_LOG(ERROR) << "Output args graph is nullptr."; + return Status(kMEInvalidInput, "Output args graph is nullptr."); } std::string file_path; @@ -193,18 +193,28 @@ Status Serialization::Load(const std::vector &file, ModelType model_type, } auto graph_data = std::make_shared(anf_graph, kMindIR); #if !defined(_WIN32) && !defined(_WIN64) + // Config preprocessor, temporary way to let mindspore.so depends on _c_dataengine std::string preprocessor = LoadPreprocess(file_path); if (!preprocessor.empty()) { + std::string dataengine_so_path; + Status dlret = DLSoPath(&dataengine_so_path); + CHECK_FAIL_AND_RELEASE(dlret, nullptr, "Parse dataengine_so failed: " + dlret.GetErrDescription()); + + void *handle = nullptr; + void *function = nullptr; + dlret = DLSoOpen(dataengine_so_path, "ParseMindIRPreprocess_C", &handle, &function); + CHECK_FAIL_AND_RELEASE(dlret, handle, "Parse ParseMindIRPreprocess_C failed: " + dlret.GetErrDescription()); + + auto ParseMindIRPreprocessFun = + (void (*)(const std::string &, const std::string &, std::vector> *, + Status *))(function); + std::vector> data_graph; - status = dataset::Serdes::ParseMindIRPreprocess(preprocessor, "image", &data_graph); - if (status != kSuccess) { - MS_LOG(ERROR) << status.GetErrDescription(); - return status; - } + ParseMindIRPreprocessFun(preprocessor, "image", &data_graph, &dlret); + CHECK_FAIL_AND_RELEASE(dlret, handle, "Load preprocess failed: " + dlret.GetErrDescription()); + DLSoClose(handle); if (!data_graph.empty()) { graph_data->SetPreprocess(data_graph); - } else { - MS_LOG(WARNING) << "Load preprocess failed, no data preprocess operations found 
in MindIR."; } } #endif @@ -230,9 +240,8 @@ Status Serialization::Load(const std::vector> &files, ModelTyp std::vector *graphs, const Key &dec_key, const std::vector &dec_mode) { std::stringstream err_msg; if (graphs == nullptr) { - err_msg << "Output args graph is nullptr."; - MS_LOG(ERROR) << err_msg.str(); - return Status(kMEInvalidInput, err_msg.str()); + MS_LOG(ERROR) << "Output args graph is nullptr."; + return Status(kMEInvalidInput, "Output args graph is nullptr."); } if (files.size() == 1) { @@ -266,6 +275,21 @@ Status Serialization::Load(const std::vector> &files, ModelTyp MS_LOG(ERROR) << err_msg.str(); return Status(kMEInvalidInput, err_msg.str()); } +#if !defined(_WIN32) && !defined(_WIN64) + // Dataset so loading + std::string dataengine_so_path; + Status dlret = DLSoPath(&dataengine_so_path); + CHECK_FAIL_AND_RELEASE(dlret, nullptr, "Parse dataengine_so failed: " + dlret.GetErrDescription()); + + void *handle = nullptr; + void *function = nullptr; + dlret = DLSoOpen(dataengine_so_path, "ParseMindIRPreprocess_C", &handle, &function); + CHECK_FAIL_AND_RELEASE(dlret, handle, "Parse ParseMindIRPreprocess_C failed: " + dlret.GetErrDescription()); + + auto ParseMindIRPreprocessFun = + (void (*)(const std::string &, const std::string &, std::vector> *, + Status *))(function); +#endif std::vector results; for (size_t i = 0; i < anf_graphs.size(); ++i) { if (anf_graphs[i] == nullptr) { @@ -278,25 +302,25 @@ Status Serialization::Load(const std::vector> &files, ModelTyp return Status(kMEInvalidInput, err_msg.str()); } auto graph_data = std::make_shared(anf_graphs[i], kMindIR); + #if !defined(_WIN32) && !defined(_WIN64) + // Config preprocessor, temporary way to let mindspore.so depends on _c_dataengine std::string preprocessor = LoadPreprocess(files_path[i]); if (!preprocessor.empty()) { std::vector> data_graph; - auto status = dataset::Serdes::ParseMindIRPreprocess(preprocessor, "image", &data_graph); - if (status != kSuccess) { - MS_LOG(ERROR) << 
status.GetErrDescription(); - return status; - } + ParseMindIRPreprocessFun(preprocessor, "image", &data_graph, &dlret); + CHECK_FAIL_AND_RELEASE(dlret, handle, "Load preprocess failed: " + dlret.GetErrDescription()); if (!data_graph.empty()) { graph_data->SetPreprocess(data_graph); - } else { - MS_LOG(WARNING) << "Load preprocess failed, no data preprocess operations found in MindIR."; } } #endif results.emplace_back(graph_data); } - +#if !defined(_WIN32) && !defined(_WIN64) + // Dataset so release + DLSoClose(handle); +#endif *graphs = std::move(results); return kSuccess; } diff --git a/mindspore/ccsrc/cxx_api/types.cc b/mindspore/ccsrc/cxx_api/types.cc index 5448de2d999..5dd71bab9db 100644 --- a/mindspore/ccsrc/cxx_api/types.cc +++ b/mindspore/ccsrc/cxx_api/types.cc @@ -14,10 +14,12 @@ * limitations under the License. */ #include "include/api/types.h" +#include #include #include "securec/include/securec.h" #include "mindspore/core/ir/api_tensor_impl.h" #include "mindspore/core/utils/convert_utils_base.h" +#include "utils/file_utils.h" namespace mindspore { class Buffer::Impl { @@ -184,6 +186,69 @@ MSTensor *MSTensor::CreateDevTensor(const std::vector &name, enum DataType } } +MSTensor *MSTensor::CreateImageTensor(const std::vector &image_file) noexcept { + std::string image_file_str = CharToString(image_file); + + try { + auto realpath = FileUtils::GetRealPath(image_file_str.c_str()); + if (!realpath.has_value()) { + MS_LOG(ERROR) << "Get real path failed, path=" << image_file_str; + return nullptr; + } + + // Read image file + auto file = realpath.value(); + if (file.empty()) { + MS_LOG(ERROR) << "can not find any input file."; + return nullptr; + } + + std::ifstream ifs(file, std::ios::in | std::ios::binary); + if (!ifs.good()) { + MS_LOG(ERROR) << "File: " + file + " does not exist."; + return nullptr; + } + if (!ifs.is_open()) { + MS_LOG(ERROR) << "File: " + file + " open failed."; + return nullptr; + } + + auto &io_seekg1 = ifs.seekg(0, std::ios::end); + if 
(!io_seekg1.good() || io_seekg1.fail() || io_seekg1.bad()) { + ifs.close(); + MS_LOG(ERROR) << "Failed to seekg file: " + file; + return nullptr; + } + + size_t size = ifs.tellg(); + MSTensor *ret = + new MSTensor(file, mindspore::DataType::kNumberTypeUInt8, {static_cast(size)}, nullptr, size); + + auto &io_seekg2 = ifs.seekg(0, std::ios::beg); + if (!io_seekg2.good() || io_seekg2.fail() || io_seekg2.bad()) { + ifs.close(); + MS_LOG(ERROR) << "Failed to seekg file: " + file; + return nullptr; + } + + auto &io_read = ifs.read(reinterpret_cast(ret->MutableData()), size); + if (!io_read.good() || io_read.fail() || io_read.bad()) { + ifs.close(); + MS_LOG(ERROR) << "Failed to read file: " + file; + return nullptr; + } + ifs.close(); + + return ret; + } catch (const std::bad_alloc &) { + MS_LOG(ERROR) << "Malloc memory failed."; + return nullptr; + } catch (...) { + MS_LOG(ERROR) << "Unknown error occurred."; + return nullptr; + } +} + MSTensor *MSTensor::CharStringsToTensor(const std::vector &name, const std::vector> &str) { // num(4 bytes) + offset1(4 bytes) + offset2(4 bytes) + ... + data1(str1.len) + data2(str2.len) + ... 
// str1.len() = offset2 - offset1 diff --git a/mindspore/ccsrc/minddata/dataset/api/execute.cc b/mindspore/ccsrc/minddata/dataset/api/execute.cc index 7474f8e5ca2..c894c53482a 100644 --- a/mindspore/ccsrc/minddata/dataset/api/execute.cc +++ b/mindspore/ccsrc/minddata/dataset/api/execute.cc @@ -665,5 +665,41 @@ Status Execute::DeviceMemoryRelease() { return Status::OK(); } +Status Execute::Run(const std::vector> &data_graph, + const std::vector &inputs, std::vector *outputs) { + std::vector transform_inputs = inputs; + std::vector transform_outputs; + if (!data_graph.empty()) { + for (auto exes : data_graph) { + CHECK_FAIL_RETURN_UNEXPECTED(exes != nullptr, "Given execute object is null."); + Status ret = exes->operator()(transform_inputs, &transform_outputs); + if (ret != kSuccess) { + MS_LOG(ERROR) << "Run preprocess failed:" << ret.GetErrDescription(); + return ret; + } + MS_LOG(DEBUG) << "transform_outputs[0].Shape: " << transform_outputs[0].Shape(); + transform_inputs = transform_outputs; + } + *outputs = std::move(transform_outputs); + } else { + std::string msg = "The set of Executors can not be empty."; + MS_LOG(ERROR) << msg; + RETURN_STATUS_UNEXPECTED(msg); + } + return Status::OK(); +} + +// In the current stage, there is a cyclic dependency between libmindspore.so and c_dataengine.so, +// we make a C function here and dlopen by libmindspore.so to avoid linking explicitly, +// will be fixed after decoupling libmindspore.so into multi submodules +extern "C" { +// ExecuteRun_C has C-linkage specified, but returns user-defined type 'mindspore::Status' which is incompatible with C +void ExecuteRun_C(const std::vector> &data_graph, + std::vector &inputs, std::vector *outputs, Status *s) { + Status ret = Execute::Run(data_graph, inputs, outputs); + *s = Status(ret); +} +} + } // namespace dataset } // namespace mindspore diff --git a/mindspore/ccsrc/minddata/dataset/engine/serdes.cc b/mindspore/ccsrc/minddata/dataset/engine/serdes.cc index b7dcf9864cd..3642b8ab44d 
100644 --- a/mindspore/ccsrc/minddata/dataset/engine/serdes.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/serdes.cc @@ -376,5 +376,18 @@ Status Serdes::ParseMindIRPreprocess(const std::string &dataset_json, const std: return Status::OK(); } +// In the current stage, there is a cyclic dependency between libmindspore.so and c_dataengine.so, +// we make a C function here and dlopen by libmindspore.so to avoid linking explicitly, +// will be fixed after decoupling libmindspore.so into multi submodules +extern "C" { +// ParseMindIRPreprocess_C has C-linkage specified, but returns user-defined type 'mindspore::Status' +// which is incompatible with C +void ParseMindIRPreprocess_C(const std::string &dataset_json, const std::string &process_column, + std::vector> *data_graph, Status *s) { + Status ret = Serdes::ParseMindIRPreprocess(dataset_json, process_column, data_graph); + *s = Status(ret); +} +} + } // namespace dataset } // namespace mindspore diff --git a/mindspore/ccsrc/minddata/dataset/include/dataset/execute.h b/mindspore/ccsrc/minddata/dataset/include/dataset/execute.h index d37d3ec4f4e..17b89d5980d 100644 --- a/mindspore/ccsrc/minddata/dataset/include/dataset/execute.h +++ b/mindspore/ccsrc/minddata/dataset/include/dataset/execute.h @@ -106,6 +106,10 @@ class Execute { /// \return Status error code, returns OK if no error encountered. Status operator()(const std::vector &input_tensor_list, std::vector *out); + /// \brief Given a set of Executes, run them + static Status Run(const std::vector> &data_graph, + const std::vector &inputs, std::vector *outputs); + /// \brief The function to release device memory on Ascend310. 
Status DeviceMemoryRelease(); diff --git a/mindspore/lite/src/cxx_api/model/model.cc b/mindspore/lite/src/cxx_api/model/model.cc index c1301db6617..6dbd9e99860 100644 --- a/mindspore/lite/src/cxx_api/model/model.cc +++ b/mindspore/lite/src/cxx_api/model/model.cc @@ -109,12 +109,22 @@ Status Model::Predict(const std::vector &inputs, std::vector return impl_->Predict(inputs, outputs, before, after); } -Status Model::Predict(const std::vector &input, std::vector *outputs, const MSKernelCallBack &before, - const MSKernelCallBack &after) { +Status Model::PredictWithPreprocess(const std::vector &inputs, std::vector *outputs, + const MSKernelCallBack &before, const MSKernelCallBack &after) { MS_LOG(ERROR) << "Unsupported Feature."; return kLiteNotSupport; } +Status Model::Preprocess(const std::vector &inputs, std::vector *outputs) { + MS_LOG(ERROR) << "Unsupported Feature."; + return kLiteNotSupport; +} + +bool Model::HasPreprocess() { + MS_LOG(ERROR) << "Unsupported Feature."; + return false; +} + Model::Model() : impl_(nullptr) {} Model::~Model() {} diff --git a/model_zoo/official/cv/resnext/ascend310_infer/src/CMakeLists.txt b/model_zoo/official/cv/resnext/ascend310_infer/src/CMakeLists.txt index a1eb6819e39..c1266efd848 100644 --- a/model_zoo/official/cv/resnext/ascend310_infer/src/CMakeLists.txt +++ b/model_zoo/official/cv/resnext/ascend310_infer/src/CMakeLists.txt @@ -13,4 +13,4 @@ file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) add_executable(main main.cc utils.cc) target_link_libraries(main ${MS_LIB} ${MD_LIB} gflags) add_executable(main_preprocess main_preprocess.cc utils.cc) -target_link_libraries(main_preprocess ${MS_LIB} ${MD_LIB} gflags) +target_link_libraries(main_preprocess ${MS_LIB} gflags) diff --git a/model_zoo/official/cv/resnext/ascend310_infer/src/main_preprocess.cc b/model_zoo/official/cv/resnext/ascend310_infer/src/main_preprocess.cc index f1449c3834d..ae1a9f29d4a 100644 --- 
a/model_zoo/official/cv/resnext/ascend310_infer/src/main_preprocess.cc +++ b/model_zoo/official/cv/resnext/ascend310_infer/src/main_preprocess.cc @@ -63,14 +63,58 @@ int main(int argc, char **argv) { return 1; } - std::vector outputs; - ret = model.Predict(FLAGS_image_path, &outputs); + std::cout << "Check if data preprocess exists: " << model.HasPreprocess() << std::endl; + + // way 1, construct a common MSTensor + std::vector inputs1 = {ReadFileToTensor(FLAGS_image_path)}; + std::vector outputs1; + + ret = model.PredictWithPreprocess(inputs1, &outputs1); if (ret.IsError()) { std::cout << "ERROR: Predict failed." << std::endl; return 1; } - auto shape = outputs[0].Shape(); + std::ofstream o1("result1.txt", std::ios::out); + o1.write(reinterpret_cast(outputs1[0].MutableData()), std::streamsize(outputs1[0].DataSize())); + + // way 2, construct a pointer of MSTensor, be careful of destroy + MSTensor *tensor = MSTensor::CreateImageTensor(FLAGS_image_path); + std::vector inputs2 = {*tensor}; + MSTensor::DestroyTensorPtr(tensor); + std::vector outputs2; + + ret = model.PredictWithPreprocess(inputs2, &outputs2); + if (ret.IsError()) { + std::cout << "ERROR: Predict failed." << std::endl; + return 1; + } + + std::ofstream o2("result2.txt", std::ios::out); + o2.write(reinterpret_cast(outputs2[0].MutableData()), std::streamsize(outputs2[0].DataSize())); + + // way 3, split preprocess and predict + std::vector inputs3 = {ReadFileToTensor(FLAGS_image_path)}; + std::vector outputs3; + + ret = model.Preprocess(inputs3, &outputs3); + if (ret.IsError()) { + std::cout << "ERROR: Preprocess failed." << std::endl; + return 1; + } + + std::vector outputs4; + ret = model.Predict(outputs3, &outputs4); + if (ret.IsError()) { + std::cout << "ERROR: Predict failed." 
<< std::endl; + return 1; + } + + std::ofstream o3("result3.txt", std::ios::out); + o3.write(reinterpret_cast(outputs4[0].MutableData()), std::streamsize(outputs4[0].DataSize())); + + // check shape + auto shape = outputs1[0].Shape(); std::cout << "Output Shape: " << std::endl; for (auto s : shape) { std::cout << s << ", ";