forked from mindspore-Ecosystem/mindspore
!30098 [MSLITE] change kMindIR_Opt to kMindIR_Lite
Merge pull request !30098 from ling/clean
commit f9da3f44dc
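The rename is purely lexical: kMindIR_Lite keeps the old enumerator value 4, so only source code that spells out kMindIR_Opt changes. A minimal caller-side sketch of the renamed constant (the wrapper function and header paths are illustrative, not part of this patch):

    #include <memory>
    #include "include/api/context.h"
    #include "include/api/model.h"

    // Build a converter-produced MindSpore Lite model held in (buf, buf_size).
    // kMindIR_Lite is the new spelling of kMindIR_Opt; the enum value is still 4.
    mindspore::Status BuildLiteModel(const void *buf, size_t buf_size, mindspore::Model *model) {
      auto context = std::make_shared<mindspore::Context>();
      context->MutableDeviceInfo().push_back(std::make_shared<mindspore::CPUDeviceInfo>());
      return model->Build(buf, buf_size, mindspore::kMindIR_Lite, context);
    }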
@@ -39,7 +39,7 @@ enum ModelType : uint32_t {
   kAIR = 1,
   kOM = 2,
   kONNX = 3,
-  kMindIR_Opt = 4,
+  kMindIR_Lite = 4,
   // insert new data type here
   kUnknownType = 0xFFFFFFFF
 };
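Because kMindIR_Lite stays the last non-sentinel enumerator, range checks of the form used by the JNI bindings below remain valid. A sketch of that invariant (the helper name is hypothetical; kMindIR is assumed to be the first enumerator, declared just above this hunk):

    // Valid model types occupy the contiguous range [kMindIR, kMindIR_Lite];
    // kUnknownType (0xFFFFFFFF) deliberately sits outside it as a sentinel.
    bool IsValidModelType(int v) {
      return v >= static_cast<int>(mindspore::kMindIR) &&
             v <= static_cast<int>(mindspore::kMindIR_Lite);
    }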
@@ -74,7 +74,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_com_mindspore_Model_buildByBuffer(JNIEnv
     return reinterpret_cast<jlong>(nullptr);
   }
   mindspore::ModelType c_model_type;
-  if (model_type >= static_cast<int>(mindspore::kMindIR) && model_type <= static_cast<int>(mindspore::kMindIR_Opt)) {
+  if (model_type >= static_cast<int>(mindspore::kMindIR) && model_type <= static_cast<int>(mindspore::kMindIR_Lite)) {
     c_model_type = static_cast<mindspore::ModelType>(model_type);
   } else {
     MS_LOGE("Invalid model type : %d", model_type);
@@ -133,7 +133,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_com_mindspore_Model_buildByPath(JNIEnv *
                                                          jcharArray key_str, jstring dec_mod) {
   auto c_model_path = env->GetStringUTFChars(model_path, JNI_FALSE);
   mindspore::ModelType c_model_type;
-  if (model_type >= static_cast<int>(mindspore::kMindIR) && model_type <= static_cast<int>(mindspore::kMindIR_Opt)) {
+  if (model_type >= static_cast<int>(mindspore::kMindIR) && model_type <= static_cast<int>(mindspore::kMindIR_Lite)) {
     c_model_type = static_cast<mindspore::ModelType>(model_type);
   } else {
     MS_LOGE("Invalid model type : %d", model_type);
@@ -53,7 +53,7 @@ Status Serialization::Load(const void *model_data, size_t data_size, ModelType m
     MS_LOG(ERROR) << "graph is nullptr.";
     return kLiteNullptr;
   }
-  if (model_type != kMindIR && model_type != kMindIR_Opt) {
+  if (model_type != kMindIR && model_type != kMindIR_Lite) {
     MS_LOG(ERROR) << "Unsupported IR.";
     return kLiteInputParamInvalid;
   }
@@ -95,7 +95,7 @@ Status Serialization::Load(const std::vector<char> &file, ModelType model_type,
     MS_LOG(ERROR) << "graph is nullptr.";
     return kLiteNullptr;
   }
-  if (model_type != kMindIR && model_type != kMindIR_Opt) {
+  if (model_type != kMindIR && model_type != kMindIR_Lite) {
     MS_LOG(ERROR) << "Unsupported IR.";
     return kLiteInputParamInvalid;
   }
@@ -157,7 +157,7 @@ Status Serialization::ExportModel(const Model &model, ModelType model_type, cons
     MS_LOG(ERROR) << "Model is not TrainModel.";
     return kLiteError;
   }
-  if (model_type != kMindIR && model_type != kMindIR_Opt) {
+  if (model_type != kMindIR && model_type != kMindIR_Lite) {
     MS_LOG(ERROR) << "Unsupported Export Format " << model_type;
     return kLiteParamInvalid;
   }
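Both Serialization::Load overloads and ExportModel now gate on the new name. A usage sketch for the buffer overload (the wrapper is hypothetical; the decryption-key parameters of the full signature are assumed to be defaulted):

    #include "include/api/graph.h"
    #include "include/api/serialization.h"

    // model_data/data_size hold a converter-produced lite buffer. Any type
    // other than kMindIR or kMindIR_Lite now fails with kLiteInputParamInvalid.
    mindspore::Status LoadLiteGraph(const void *model_data, size_t data_size,
                                    mindspore::Graph *graph) {
      return mindspore::Serialization::Load(model_data, data_size,
                                            mindspore::kMindIR_Lite, graph);
    }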
@@ -1686,7 +1686,7 @@ session::LiteSession *session::LiteSession::CreateSession(const char *model_buf,
     return nullptr;
   }
   auto ret = reinterpret_cast<lite::LiteSession *>(session)->LoadModelAndCompileByBuf(
-    model_buf, mindspore::ModelType::kMindIR_Opt, size);
+    model_buf, mindspore::ModelType::kMindIR_Lite, size);
   if (ret != RET_OK) {
     MS_LOG(ERROR) << "Init session failed";
     delete session;
@@ -1702,7 +1702,7 @@ session::LiteSession *lite::LiteSession::CreateSession(const std::string &model_
     return nullptr;
   }
   auto ret = reinterpret_cast<lite::LiteSession *>(session)->LoadModelAndCompileByPath(
-    model_path, mindspore::ModelType::kMindIR_Opt);
+    model_path, mindspore::ModelType::kMindIR_Lite);
   if (ret != RET_OK) {
     MS_LOG(ERROR) << "Init session failed";
     delete session;
@@ -1713,10 +1713,10 @@ session::LiteSession *lite::LiteSession::CreateSession(const std::string &model_
 
 mindspore::ModelType lite::LiteSession::LoadModelByBuff(const char *model_buf, const size_t &buf_size, char **lite_buf,
                                                         size_t *size, mindspore::ModelType model_type) {
-  if (model_type == mindspore::ModelType::kMindIR_Opt) {
+  if (model_type == mindspore::ModelType::kMindIR_Lite) {
     *size = buf_size;
     *lite_buf = const_cast<char *>(model_buf);
-    return mindspore::ModelType::kMindIR_Opt;
+    return mindspore::ModelType::kMindIR_Lite;
   }
 
   if (model_type != mindspore::ModelType::kMindIR) {
@@ -1729,7 +1729,7 @@ mindspore::ModelType lite::LiteSession::LoadModelByBuff(const char *model_buf, c
     MS_LOG(DEBUG) << "The kMindIR type model buffer is valid mslite model buffer";
     *size = buf_size;
     *lite_buf = const_cast<char *>(model_buf);
-    return mindspore::ModelType::kMindIR_Opt;
+    return mindspore::ModelType::kMindIR_Lite;
   }
   MS_LOG(WARNING) << "Invalid mslite model.";
   return mindspore::ModelType::kMindIR;
@@ -1738,10 +1738,10 @@ mindspore::ModelType lite::LiteSession::LoadModelByBuff(const char *model_buf, c
 mindspore::ModelType lite::LiteSession::LoadModelByBuff(const char *model_buf, const size_t &buf_size, char **lite_buf,
                                                         size_t *size, mindspore::ModelType model_type,
                                                         const std::shared_ptr<mindspore::Context> &ms_context) {
-  if (model_type == mindspore::ModelType::kMindIR_Opt) {
+  if (model_type == mindspore::ModelType::kMindIR_Lite) {
     *size = buf_size;
     *lite_buf = const_cast<char *>(model_buf);
-    return mindspore::ModelType::kMindIR_Opt;
+    return mindspore::ModelType::kMindIR_Lite;
   }
 
   if (model_type != mindspore::ModelType::kMindIR) {
@@ -1754,7 +1754,7 @@ mindspore::ModelType lite::LiteSession::LoadModelByBuff(const char *model_buf, c
     MS_LOG(DEBUG) << "The kMindIR type model buffer is valid mslite model buffer";
     *size = buf_size;
     *lite_buf = const_cast<char *>(model_buf);
-    return mindspore::ModelType::kMindIR_Opt;
+    return mindspore::ModelType::kMindIR_Lite;
   }
   MS_LOG(WARNING) << "Invalid mslite model.";
 
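Taken together, the two LoadModelByBuff overloads implement the dispatch condensed below: a buffer declared kMindIR_Lite is handed through untouched, while a kMindIR buffer is probed and, if it is already a valid mslite buffer, reclassified as kMindIR_Lite. The helper name and the boolean probe are illustrative; the real code inspects the buffer itself:

    // Condensed view of the dispatch, not the verbatim implementation.
    mindspore::ModelType ResolveBufferType(mindspore::ModelType declared,
                                           bool is_valid_lite_buffer) {
      if (declared == mindspore::ModelType::kMindIR_Lite) {
        return mindspore::ModelType::kMindIR_Lite;  // pass through as-is
      }
      if (declared == mindspore::ModelType::kMindIR && is_valid_lite_buffer) {
        return mindspore::ModelType::kMindIR_Lite;  // already converted
      }
      return mindspore::ModelType::kMindIR;  // needs runtime conversion
    }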
@@ -204,7 +204,7 @@ TEST_F(MixDataTypeTest, mix1) {
   auto status = impl->LoadConfig("MixDataTypeTestConfig");
   ASSERT_EQ(status, kSuccess);
 
-  status = impl->Build(flat_model, size, kMindIR_Opt, context);
+  status = impl->Build(flat_model, size, kMindIR_Lite, context);
   ASSERT_EQ(status, kSuccess);
 
   /* check */
@@ -363,7 +363,7 @@ TEST_F(MultipleDeviceTest, NewApi1) {
   context->MutableDeviceInfo().push_back(std::make_shared<mindspore::GPUDeviceInfo>());
 
   mindspore::Model *model = new mindspore::Model();
-  auto ret = model->Build(content, size, mindspore::kMindIR_Opt, context);
+  auto ret = model->Build(content, size, mindspore::kMindIR_Lite, context);
   ASSERT_EQ(false, ret.IsOk());
 
   delete model;
@@ -422,7 +422,7 @@ TEST_F(MultipleDeviceTest, NewApi5) {
   context->MutableDeviceInfo().push_back(std::make_shared<mindspore::CPUDeviceInfo>());
 
   auto model_impl = std::make_shared<mindspore::ModelImpl>();
-  auto ret = model_impl->Build(content, size, mindspore::kMindIR_Opt, context);
+  auto ret = model_impl->Build(content, size, mindspore::kMindIR_Lite, context);
   ASSERT_EQ(mindspore::kSuccess, ret.StatusCode());
 
   CheckResult(reinterpret_cast<const mindspore::lite::LiteSession *>(model_impl->GetSession())->get_kernels(),
@@ -467,7 +467,7 @@ TEST_F(MultipleDeviceTest, NewApi6) {
   context->MutableDeviceInfo().push_back(std::make_shared<mindspore::GPUDeviceInfo>());
 
   auto model_impl = std::make_shared<mindspore::ModelImpl>();
-  auto ret = model_impl->Build(content, size, mindspore::kMindIR_Opt, context);
+  auto ret = model_impl->Build(content, size, mindspore::kMindIR_Lite, context);
   ASSERT_EQ(mindspore::kSuccess, ret.StatusCode());
 
   CheckResult(reinterpret_cast<const mindspore::lite::LiteSession *>(model_impl->GetSession())->get_kernels(),
@@ -511,7 +511,7 @@ TEST_F(MultipleDeviceTest, NewApi7) {
   context->MutableDeviceInfo().push_back(std::make_shared<mindspore::GPUDeviceInfo>());
 
   auto model_impl = std::make_shared<mindspore::ModelImpl>();
-  auto ret = model_impl->Build(content, size, mindspore::kMindIR_Opt, context);
+  auto ret = model_impl->Build(content, size, mindspore::kMindIR_Lite, context);
   ASSERT_EQ(mindspore::kSuccess, ret.StatusCode());
 
   CheckResult(reinterpret_cast<const mindspore::lite::LiteSession *>(model_impl->GetSession())->get_kernels(),
@@ -536,7 +536,7 @@ TEST_F(MultipleDeviceTest, NewApi8) {
   context->MutableDeviceInfo().push_back(std::make_shared<mindspore::KirinNPUDeviceInfo>());
 
   auto model_impl = std::make_shared<mindspore::ModelImpl>();
-  auto ret = model_impl->Build(content, size, mindspore::kMindIR_Opt, context);
+  auto ret = model_impl->Build(content, size, mindspore::kMindIR_Lite, context);
   ASSERT_EQ(mindspore::kSuccess, ret.StatusCode());
 
   CheckResult(reinterpret_cast<const mindspore::lite::LiteSession *>(model_impl->GetSession())->get_kernels(),
@@ -206,7 +206,7 @@ TEST_F(TestRegistryCustomOp, TestCustomAdd) {
 
   // build a model
   auto model = std::make_shared<mindspore::Model>();
-  auto ret = model->Build(content, size, kMindIR_Opt, context);
+  auto ret = model->Build(content, size, kMindIR_Lite, context);
   ASSERT_EQ(kSuccess, ret.StatusCode());
   auto inputs = model->GetInputs();
   ASSERT_EQ(inputs.size(), 2);
@@ -515,7 +515,7 @@ TEST_F(TestGPURegistryCustomOp, TestGPUCustomAdd) {
 
   // build a model
   auto model = std::make_shared<mindspore::Model>();
-  auto ret = model->Build(content, size, kMindIR_Opt, context);
+  auto ret = model->Build(content, size, kMindIR_Lite, context);
   ASSERT_EQ(kSuccess, ret.StatusCode());
   auto inputs = model->GetInputs();
   ASSERT_EQ(inputs.size(), 2);
@@ -166,7 +166,7 @@ TEST_F(TestRegistry, TestAdd) {
 
   // build a model
   auto model = std::make_shared<mindspore::Model>();
-  auto ret = model->Build(content, size, kMindIR_Opt, context);
+  auto ret = model->Build(content, size, kMindIR_Lite, context);
   ASSERT_EQ(kSuccess, ret.StatusCode());
   auto inputs = model->GetInputs();
   ASSERT_EQ(inputs.size(), 2);
@@ -53,7 +53,7 @@ TEST_F(RuntimeConvert, relu2) {
   Model model;
   auto context = std::make_shared<mindspore::Context>();
   context->MutableDeviceInfo().push_back(std::make_shared<mindspore::CPUDeviceInfo>());
-  Status build_ret = model.Build("./relu.mindir", mindspore::kMindIR_Opt, context);
+  Status build_ret = model.Build("./relu.mindir", mindspore::kMindIR_Lite, context);
   ASSERT_NE(build_ret, Status::OK());
 }
 
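The relu2 test above asserts failure: declaring a plain .mindir file as kMindIR_Lite is rejected rather than silently converted. The intended pairing, sketched with hypothetical paths (a converter-produced .ms file carries kMindIR_Lite, a raw .mindir carries kMindIR):

    #include <memory>
    #include "include/api/context.h"
    #include "include/api/model.h"

    // Hypothetical paths; the point is the type/file pairing, not the names.
    mindspore::Status BuildExamples(const std::shared_ptr<mindspore::Context> &ctx) {
      mindspore::Model lite_model;
      // Converter output (.ms): already optimized, declared kMindIR_Lite.
      auto ret = lite_model.Build("./relu.ms", mindspore::kMindIR_Lite, ctx);
      if (ret.StatusCode() != mindspore::kSuccess) return ret;

      mindspore::Model mindir_model;
      // Raw export (.mindir): declared kMindIR so runtime conversion runs.
      return mindir_model.Build("./relu.mindir", mindspore::kMindIR, ctx);
    }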
@@ -74,7 +74,7 @@ extern const std::unordered_map<int, std::string> kTypeIdMap;
 extern const std::unordered_map<mindspore::Format, std::string> kTensorFormatMap;
 
 const std::unordered_map<std::string, mindspore::ModelType> ModelTypeMap{
-  {"MindIR_Opt", mindspore::ModelType::kMindIR_Opt}, {"MindIR", mindspore::ModelType::kMindIR}};
+  {"MindIR_Lite", mindspore::ModelType::kMindIR_Lite}, {"MindIR", mindspore::ModelType::kMindIR}};
 
 namespace dump {
 constexpr auto kConfigPath = "MINDSPORE_DUMP_CONFIG";
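Tools that map user-facing strings through ModelTypeMap now accept "MindIR_Lite" instead of "MindIR_Opt". A lookup sketch against the map declared above (the helper is illustrative):

    #include <string>

    // Unknown names fall back to the kUnknownType sentinel rather than throwing.
    mindspore::ModelType ParseModelType(const std::string &name) {
      auto it = ModelTypeMap.find(name);
      return it == ModelTypeMap.end() ? mindspore::ModelType::kUnknownType : it->second;
    }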
@@ -116,7 +116,7 @@ class MS_API BenchmarkFlags : public virtual FlagParser {
   BenchmarkFlags() {
     // common
     AddFlag(&BenchmarkFlags::model_file_, "modelFile", "Input model file", "");
-    AddFlag(&BenchmarkFlags::model_type_, "modelType", "Input model type. MindIR | MindIR_Opt", "MindIR");
+    AddFlag(&BenchmarkFlags::model_type_, "modelType", "Input model type. MindIR | MindIR_Lite", "MindIR");
     AddFlag(&BenchmarkFlags::in_data_file_, "inDataFile", "Input data file, if not set, use random input", "");
     AddFlag(&BenchmarkFlags::config_file_, "configFile", "Config file", "");
     AddFlag(&BenchmarkFlags::device_, "device", "CPU | GPU | NPU | Ascend310 | Ascend710", "CPU");
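For the benchmark tool this means the modelType flag now takes MindIR or MindIR_Lite, with MindIR remaining the default. A hypothetical invocation, assuming the usual --flag=value syntax of FlagParser and an illustrative model file:

    ./benchmark --modelFile=relu.ms --modelType=MindIR_Lite --device=CPU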