modify return type of Model::Import from std::shared_ptr<Model> to Model *

This commit is contained in:
hangq 2020-08-08 15:29:42 +08:00
parent 123e43cd02
commit ca6c84b806
9 changed files with 35 additions and 25 deletions

View File

@ -397,7 +397,7 @@ checkndk() {
if [ "${ANDROID_NDK}" ]; then
echo -e "\e[31mANDROID_NDK_PATH=$ANDROID_NDK \e[0m"
else
echo -e "\e[31mplease set ANDROID_NDK_PATH in environment variable for example: export ANDROID_NDK=/root/usr/android-ndk-r20b/ \e[0m"
echo -e "\e[31mplease set ANDROID_NDK in environment variable for example: export ANDROID_NDK=/root/usr/android-ndk-r20b/ \e[0m"
exit 1
fi
}

View File

@ -45,7 +45,7 @@ class MS_API Model {
/// \param[in] size Define bytes numbers of model buffer.
///
/// \return Pointer of MindSpore Lite Model.
static std::shared_ptr<Model> Import(const char *model_buf, size_t size);
static Model *Import(const char *model_buf, size_t size);
/// \brief Constructor of MindSpore Lite Model using default value for parameters.
///
@ -53,7 +53,7 @@ class MS_API Model {
Model() = default;
/// \brief Destructor of MindSpore Lite Model.
virtual ~Model() = default;
virtual ~Model();
/// \brief Get MindSpore Lite Primitive by name.
///
@ -70,13 +70,13 @@ class MS_API Model {
/// \brief Get MindSpore Lite ModelImpl.
///
/// \return A pointer of MindSpore Lite ModelImpl.
std::shared_ptr<ModelImpl> model_impl();
ModelImpl *model_impl();
/// \brief Free MetaGraph in MindSpore Lite Model.
void FreeMetaGraph();
protected:
std::shared_ptr<ModelImpl> model_impl_ = nullptr;
ModelImpl *model_impl_ = nullptr;
};
/// \brief ModelBuilder defined by MindSpore Lite.

View File

@ -24,12 +24,16 @@
namespace mindspore::lite {
std::shared_ptr<Model> Model::Import(const char *model_buf, size_t size) {
auto model = std::make_shared<Model>();
Model *Model::Import(const char *model_buf, size_t size) {
auto model = new Model();
model->model_impl_ = ModelImpl::Import(model_buf, size);
return model;
}
Model::~Model() {
delete(this->model_impl_);
}
lite::Primitive *Model::GetOp(const std::string &name) const {
MS_EXCEPTION_IF_NULL(model_impl_);
return const_cast<Primitive *>(model_impl_->GetOp(name));
@ -45,9 +49,8 @@ const schema::MetaGraph *Model::GetMetaGraph() const {
return model_impl_->GetMetaGraph();
}
std::shared_ptr<ModelImpl> Model::model_impl() {
ModelImpl *Model::model_impl() {
MS_EXCEPTION_IF_NULL(model_impl_);
return this->model_impl_;
}
} // namespace mindspore::lite

View File

@ -20,7 +20,7 @@
#include "utils/log_adapter.h"
namespace mindspore::lite {
std::shared_ptr<ModelImpl> ModelImpl::Import(const char *model_buf, size_t size) {
ModelImpl *ModelImpl::Import(const char *model_buf, size_t size) {
MS_EXCEPTION_IF_NULL(model_buf);
flatbuffers::Verifier verify((const uint8_t *)model_buf, size);
if (!schema::VerifyMetaGraphBuffer(verify)) {
@ -33,7 +33,7 @@ std::shared_ptr<ModelImpl> ModelImpl::Import(const char *model_buf, size_t size)
return nullptr;
}
memcpy(inner_model_buf, model_buf, size);
auto model = std::make_shared<ModelImpl>(inner_model_buf, size);
auto model = new (std::nothrow) ModelImpl(inner_model_buf, size);
if (model == nullptr) {
MS_LOG(ERROR) << "Create modelImpl failed";
return nullptr;

View File

@ -27,7 +27,7 @@ namespace mindspore {
namespace lite {
class ModelImpl {
public:
static std::shared_ptr<ModelImpl> Import(const char *model_buf, size_t size);
static ModelImpl *Import(const char *model_buf, size_t size);
ModelImpl() = default;
explicit ModelImpl(const char *model_buf, size_t size) : model_buf_(model_buf), buf_size_(size) {
meta_graph = schema::GetMetaGraph(model_buf);

View File

@ -109,7 +109,7 @@ TEST_F(InferTest, TestConvNode) {
context->thread_num_ = 4;
auto session = session::LiteSession::CreateSession(context);
ASSERT_NE(nullptr, session);
auto ret = session->CompileGraph(model.get());
auto ret = session->CompileGraph(model);
ASSERT_EQ(lite::RET_OK, ret);
auto inputs = session->GetInputs();
ASSERT_EQ(inputs.size(), 1);
@ -206,7 +206,7 @@ TEST_F(InferTest, TestAddNode) {
context->thread_num_ = 4;
auto session = session::LiteSession::CreateSession(context);
ASSERT_NE(nullptr, session);
auto ret = session->CompileGraph(model.get());
auto ret = session->CompileGraph(model);
ASSERT_EQ(lite::RET_OK, ret);
auto inputs = session->GetInputs();
ASSERT_EQ(inputs.size(), 2);
@ -257,7 +257,7 @@ TEST_F(InferTest, TestModel) {
context->thread_num_ = 4;
auto session = session::LiteSession::CreateSession(context);
ASSERT_NE(nullptr, session);
auto ret = session->CompileGraph(model.get());
auto ret = session->CompileGraph(model);
ASSERT_EQ(lite::RET_OK, ret);
auto inputs = session->GetInputs();
ASSERT_EQ(inputs.size(), 1);

View File

@ -398,10 +398,11 @@ int Benchmark::RunBenchmark(const std::string &deviceType) {
MS_LOG(ERROR) << "CreateSession failed while running %s", modelName.c_str();
return RET_ERROR;
}
auto ret = session->CompileGraph(model.get());
auto ret = session->CompileGraph(model);
if (ret != RET_OK) {
MS_LOG(ERROR) << "CompileGraph failed while running %s", modelName.c_str();
delete (session);
delete (model);
return ret;
}
msInputs = session->GetInputs();
@ -420,6 +421,7 @@ int Benchmark::RunBenchmark(const std::string &deviceType) {
if (status != 0) {
MS_LOG(ERROR) << "Generate input data error";
delete (session);
delete (model);
return status;
}
if (!_flags->calibDataPath.empty()) {
@ -427,6 +429,7 @@ int Benchmark::RunBenchmark(const std::string &deviceType) {
if (status != 0) {
MS_LOG(ERROR) << "Run MarkAccuracy error: %d" << status;
delete (session);
delete (model);
return status;
}
} else {
@ -434,6 +437,7 @@ int Benchmark::RunBenchmark(const std::string &deviceType) {
if (status != 0) {
MS_LOG(ERROR) << "Run MarkPerformance error: %d" << status;
delete (session);
delete (model);
return status;
}
}
@ -448,6 +452,7 @@ int Benchmark::RunBenchmark(const std::string &deviceType) {
}
delete (session);
delete (model);
return RET_OK;
}

View File

@ -920,7 +920,7 @@ STATUS PostTrainingQuantizer::DoQuantize(FuncGraphPtr funcGraph) {
return RET_ERROR;
}
auto ret = session_->CompileGraph(model.get());
auto ret = session_->CompileGraph(model);
if (ret != lite::RET_OK) {
MS_LOG(ERROR) << "compile graph error";
return RET_ERROR;

View File

@ -278,7 +278,7 @@ int TimeProfile::RunTimeProfile() {
}
auto model = lite::Model::Import(graphBuf, size);
auto ret = session_->CompileGraph(model.get());
auto ret = session_->CompileGraph(model);
if (ret != RET_OK) {
MS_LOG(ERROR) << "Compile graph failed.";
return RET_ERROR;
@ -336,6 +336,8 @@ int TimeProfile::RunTimeProfile() {
}
ms_inputs_.clear();
delete graphBuf;
delete session_;
delete model;
return ret;
}