forked from mindspore-Ecosystem/mindspore
Avoid multiple checks
parent 990d06f302
commit 3e7f9e41fe

@@ -60,10 +60,10 @@ void NPUManager::Reset() {
   domi::HiaiIrBuild ir_build;
   for (const auto &model_map : models_) {
     auto model = model_map.second;
-    if (!model->is_freed) {
+    if (!model->is_freed_) {
       ir_build.ReleaseModelBuff(*model->model_buffer_data_);
       model->model_buffer_data_ = nullptr;
-      model->is_freed = true;
+      model->is_freed_ = true;
-      model->desc_.reset();
+      model->desc_ = nullptr;
     }

@@ -88,12 +88,19 @@ bool NPUManager::CheckDDKVersion() {
   return true;
 }
 
 bool NPUManager::IsSupportNPU() {
-  if (IsKirinChip() && CheckEMUIVersion() && CheckDDKVersion()) {
-    MS_LOG(INFO) << "The current device support NPU.";
-    return true;
+  // Avoid multiple checks
+  if (!is_check_version_) {
+    is_check_version_ = true;
+    if (IsKirinChip() && CheckEMUIVersion() && CheckDDKVersion()) {
+      is_support_ = true;
+      MS_LOG(INFO) << "The current device support NPU.";
+    } else {
+      is_support_ = false;
+      MS_LOG(INFO) << "The current device NOT SUPPORT NPU.";
+    }
+    return is_support_;
   } else {
-    MS_LOG(INFO) << "The current device NOT SUPPORT NPU.";
-    return false;
+    return is_support_;
   }
 }

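The hunk above is the substance of the commit title: the Kirin chip / EMUI / DDK version probe is run once, its outcome is cached in the new is_check_version_ and is_support_ members, and every later IsSupportNPU() call returns the cached answer. The following is a minimal, self-contained sketch of that check-once-and-cache pattern, not the MindSpore code itself; ProbeHardware() is a hypothetical stand-in for the real IsKirinChip()/CheckEMUIVersion()/CheckDDKVersion() checks.

#include <iostream>

// Minimal sketch of the caching pattern used by NPUManager::IsSupportNPU().
// ProbeHardware() is a hypothetical stand-in for the real version checks.
class NpuSupportCache {
 public:
  bool IsSupported() {
    if (!checked_) {        // first call: run the probe once
      checked_ = true;
      supported_ = ProbeHardware();
    }
    return supported_;      // later calls: cached result, no re-probe
  }

 private:
  static bool ProbeHardware() {
    std::cout << "probing chip/EMUI/DDK versions..." << std::endl;
    return true;            // pretend the device qualifies
  }
  bool checked_ = false;    // plays the role of is_check_version_
  bool supported_ = false;  // plays the role of is_support_
};

int main() {
  NpuSupportCache cache;
  cache.IsSupported();  // prints the probe message once
  cache.IsSupported();  // silent: answer comes from the cache
  return 0;
}
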
@@ -173,7 +180,7 @@ int NPUManager::LoadOMModel() {
     }
     total++;
     auto model = model_map.second;
-    if (model->is_loaded && model->is_freed) {
+    if (model->is_loaded_ && model->is_freed_) {
       continue;
     }
     models_desc.push_back(model->desc_);

@@ -222,7 +229,7 @@ int NPUManager::LoadModel(const std::shared_ptr<hiai::AiModelMngerClient> &clien
 
   for (const auto &desc : desc_list) {
     auto it = models_.find(desc->GetName());
-    it->second->is_loaded = true;
+    it->second->is_loaded_ = true;
     it->second->client_ = client;
   }
 

@@ -36,8 +36,8 @@ struct SubGraphModel {
   SubGraphModel(int index, std::string model_name, domi::ModelBufferData *model_buffer_data)
       : index_(index), model_name_(std::move(model_name)), model_buffer_data_(model_buffer_data) {}
 
-  bool is_freed = false;
-  bool is_loaded = false;
+  bool is_freed_ = false;
+  bool is_loaded_ = false;
   int index_;
   std::string model_name_;
   std::shared_ptr<domi::ModelBufferData> model_buffer_data_;

@@ -84,6 +84,8 @@ class NPUManager {
 
  private:
   int index_ = 0;
+  bool is_check_version_ = false;
+  bool is_support_ = false;
   std::unordered_map<std::string, SubGraphModel *> models_;
   std::vector<std::shared_ptr<hiai::AiModelMngerClient>> clients_;
 };

@@ -112,9 +112,6 @@ int NPUInsertTransformPass::InsertPostNode(const InnerContext *context, kernel::
 }
 
 int NPUInsertTransformPass::Run() {
-  if (!context_->IsNpuEnabled()) {
-    return RET_OK;
-  }
   for (size_t i = 0; i < all_kernels_->size(); i++) {
     auto kernel = (*all_kernels_)[i];
     if (kernel->desc().arch != kNPU) {

@@ -20,7 +20,10 @@
 namespace mindspore::lite {
 
 void NPUPassManager::AddPass(NPUBasePass *pass) { all_pass_.push_back(pass); }
-int NPUPassManager::Run() {
+int NPUPassManager::Run(const InnerContext *context) {
+  if (!context->IsNpuEnabled()) {
+    return RET_OK;
+  }
   for (auto pass : all_pass_) {
     auto ret = pass->Run();
     if (ret != RET_OK) {

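Taken together with the NPUInsertTransformPass and NPUTransformPass hunks, this moves the IsNpuEnabled() guard out of each individual pass and into NPUPassManager::Run(), so the whole pipeline is skipped with a single check when NPU is off. Below is a minimal sketch of that centralised-guard shape under assumed, simplified types; Pass and Context are hypothetical stand-ins for NPUBasePass and InnerContext, not the real classes.

#include <vector>

// Hypothetical, simplified stand-ins for InnerContext and NPUBasePass,
// used only to show the shape of the change: the enable check lives in
// the manager instead of being repeated in every pass.
struct Context {
  bool npu_enabled = false;
};

struct Pass {
  virtual ~Pass() = default;
  virtual int Run() { return 0; }  // 0 plays the role of RET_OK
};

class PassManager {
 public:
  void AddPass(Pass *pass) { passes_.push_back(pass); }
  int Run(const Context *context) {
    if (!context->npu_enabled) {  // single guard for the whole pipeline
      return 0;
    }
    for (auto *pass : passes_) {
      int ret = pass->Run();
      if (ret != 0) {
        return ret;  // stop at the first failing pass
      }
    }
    return 0;
  }

 private:
  std::vector<Pass *> passes_;
};

int main() {
  Context ctx;  // npu_enabled defaults to false
  Pass pass;
  PassManager manager;
  manager.AddPass(&pass);
  return manager.Run(&ctx);  // returns immediately: NPU is disabled
}
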
@@ -18,6 +18,7 @@
 #define MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_OPTIMIZER_NPU_PASS_MANAGER_H_
 #include <vector>
 #include "src/runtime/agent/npu/optimizer/npu_base_pass.h"
+#include "src/inner_context.h"
 namespace mindspore::lite {
 class NPUPassManager {
  public:

@@ -30,7 +31,7 @@ class NPUPassManager {
 
   void AddPass(NPUBasePass *pass);
 
-  int Run();
+  int Run(const InnerContext *context);
 
   void Clear();
 

@@ -97,9 +97,6 @@ int NPUTransformPass::InsertPostNode(const InnerContext *context, kernel::LiteKe
 }
 
 int NPUTransformPass::Run() {
-  if (!context_->IsNpuEnabled()) {
-    return RET_OK;
-  }
   for (size_t i = 0; i < all_kernels_->size();) {
     auto kernel = (*all_kernels_)[i];
     if (kernel->desc().arch != kNPU || npu_trans_nodes.find(kernel->Type()) == npu_trans_nodes.end()) {

@@ -574,7 +574,7 @@ int Scheduler::RunPass(std::vector<kernel::LiteKernel *> *dst_kernels) {
   auto fusion_pass = new NPUFusionPass(dst_kernels);
   mindspore::lite::NPUPassManager::GetInstance()->AddPass(fusion_pass);
 
-  ret = mindspore::lite::NPUPassManager::GetInstance()->Run();
+  ret = mindspore::lite::NPUPassManager::GetInstance()->Run(context_);
 #endif
   return ret;
 }

@@ -1,3 +1,3 @@
-mobilenet_v1_1.0_224.tflite 3
-squeezenet.tflite 3
-inception_v3.tflite 3
+mobilenet_v1_1.0_224.tflite 1.5
+squeezenet.tflite 1.5
+inception_v3.tflite 0.5