From 8aa6e2aaadcee9c5579d89d18d9d2c312e07edfe Mon Sep 17 00:00:00 2001
From: yeyunpeng
Date: Fri, 25 Dec 2020 15:54:54 +0800
Subject: [PATCH] Limit npu compilation platform to arm64 only.

---
 build.sh                                            | 11 ++++++++---
 .../java/app/src/main/native/runtime/ms_config.cpp  |  2 +-
 .../runtime/agent/npu/optimizer/npu_pass_manager.cc |  5 +----
 .../runtime/agent/npu/optimizer/npu_pass_manager.h  |  2 +-
 mindspore/lite/src/runtime/kernel/npu/npu_kernel.h  |  3 ++-
 mindspore/lite/src/scheduler.cc                     |  5 ++++-
 mindspore/lite/test/run_benchmark_nets.sh           |  4 ++--
 7 files changed, 19 insertions(+), 13 deletions(-)

diff --git a/build.sh b/build.sh
index 775ffac0b92..d49f760f9e9 100755
--- a/build.sh
+++ b/build.sh
@@ -508,12 +508,18 @@ build_lite()
         LITE_ENABLE_NPU="on"
     fi
 
-    if [ "${LITE_ENABLE_GPU}" == "on" ] || [ $1 == "arm64" ]; then
+    if [[ $1 == "arm64" && "X$DEVICE" != "Xcpu" ]]; then
         LITE_ENABLE_GPU="on"
         echo "start get opencl"
     fi
+
     if [ "${LITE_ENABLE_NPU}" == "on" ]; then
-        checkddk
+        if [ "${LITE_PLATFORM}" == "arm64" ]; then
+            checkddk
+        else
+            echo "NPU only support platform arm64."
+            exit 1
+        fi
     fi
 
     cd "${BASEPATH}/mindspore/lite"
@@ -648,7 +654,6 @@ build_jni_arm32() {
 
 build_java() {
     JAVA_PATH=${BASEPATH}/mindspore/lite/java
-    LITE_ENABLE_GPU="on"
     get_version
     build_lite_java_arm64
     build_lite_java_arm32
diff --git a/mindspore/lite/java/java/app/src/main/native/runtime/ms_config.cpp b/mindspore/lite/java/java/app/src/main/native/runtime/ms_config.cpp
index b4bf610665c..09d1892c681 100644
--- a/mindspore/lite/java/java/app/src/main/native/runtime/ms_config.cpp
+++ b/mindspore/lite/java/java/app/src/main/native/runtime/ms_config.cpp
@@ -39,7 +39,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_com_mindspore_lite_config_MSConfig_creat
       break;
     }
     case 2:  // DT_NPU
-      MS_LOGE("We only support CPU now.");
+      MS_LOGE("We only support CPU and GPU now.");
       return (jlong)context;
       break;
     default:
diff --git a/mindspore/lite/src/runtime/agent/npu/optimizer/npu_pass_manager.cc b/mindspore/lite/src/runtime/agent/npu/optimizer/npu_pass_manager.cc
index 1424773854f..ab555c599df 100644
--- a/mindspore/lite/src/runtime/agent/npu/optimizer/npu_pass_manager.cc
+++ b/mindspore/lite/src/runtime/agent/npu/optimizer/npu_pass_manager.cc
@@ -20,10 +20,7 @@
 namespace mindspore::lite {
 void NPUPassManager::AddPass(NPUBasePass *pass) { all_pass_.push_back(pass); }
 
-int NPUPassManager::Run(const InnerContext *context) {
-  if (!context->IsNpuEnabled()) {
-    return RET_OK;
-  }
+int NPUPassManager::Run() {
   for (auto pass : all_pass_) {
     auto ret = pass->Run();
     if (ret != RET_OK) {
diff --git a/mindspore/lite/src/runtime/agent/npu/optimizer/npu_pass_manager.h b/mindspore/lite/src/runtime/agent/npu/optimizer/npu_pass_manager.h
index 6dc2026af2d..945c3a65b5a 100644
--- a/mindspore/lite/src/runtime/agent/npu/optimizer/npu_pass_manager.h
+++ b/mindspore/lite/src/runtime/agent/npu/optimizer/npu_pass_manager.h
@@ -31,7 +31,7 @@ class NPUPassManager {
 
   void AddPass(NPUBasePass *pass);
 
-  int Run(const InnerContext *context);
+  int Run();
 
   void Clear();
 
diff --git a/mindspore/lite/src/runtime/kernel/npu/npu_kernel.h b/mindspore/lite/src/runtime/kernel/npu/npu_kernel.h
index d87d4539376..8e06b4ada1c 100644
--- a/mindspore/lite/src/runtime/kernel/npu/npu_kernel.h
+++ b/mindspore/lite/src/runtime/kernel/npu/npu_kernel.h
@@ -54,7 +54,8 @@ kernel::LiteKernel *NPUKernelCreator(const std::vector<lite::Tensor *> &inputs,
                                      const lite::InnerContext *ctx, const kernel::KernelKey &desc,
                                      const mindspore::lite::PrimitiveC *primitive) {
   if (!primitive->infer_flag()) {
-    MS_LOG(ERROR) << "NPU does not support runtime inference shape";
+    MS_LOG(ERROR) << "NPU does not support runtime inference shape. Type is:"
+                  << schema::EnumNamePrimitiveType(static_cast<schema::PrimitiveType>(primitive->Type()));
     return nullptr;
   }
 
diff --git a/mindspore/lite/src/scheduler.cc b/mindspore/lite/src/scheduler.cc
index 2dbf1276594..ab2750c4f60 100644
--- a/mindspore/lite/src/scheduler.cc
+++ b/mindspore/lite/src/scheduler.cc
@@ -567,6 +567,9 @@ void Scheduler::FindAllInoutKernels(const std::vector<kernel::LiteKernel *> &ker
 int Scheduler::RunPass(std::vector<kernel::LiteKernel *> *dst_kernels) {
   int ret = RET_OK;
 #if SUPPORT_NPU
+  if (!context_->IsNpuEnabled()) {
+    return RET_OK;
+  }
   auto transform_pass = new NPUTransformPass(context_, dst_kernels, src_tensors_);
   mindspore::lite::NPUPassManager::GetInstance()->AddPass(transform_pass);
   auto concat_format_pass = new NPUInsertTransformPass(context_, dst_kernels, src_tensors_);
@@ -574,7 +577,7 @@ int Scheduler::RunPass(std::vector<kernel::LiteKernel *> *dst_kernels) {
   auto fusion_pass = new NPUFusionPass(dst_kernels);
   mindspore::lite::NPUPassManager::GetInstance()->AddPass(fusion_pass);
 
-  ret = mindspore::lite::NPUPassManager::GetInstance()->Run(context_);
+  ret = mindspore::lite::NPUPassManager::GetInstance()->Run();
 #endif
   return ret;
 }
diff --git a/mindspore/lite/test/run_benchmark_nets.sh b/mindspore/lite/test/run_benchmark_nets.sh
index b0aff472a02..a242e147839 100644
--- a/mindspore/lite/test/run_benchmark_nets.sh
+++ b/mindspore/lite/test/run_benchmark_nets.sh
@@ -1316,8 +1316,8 @@ function Run_arm64() {
 
     # Run npu converted models:
     while read line; do
-        model_name=`echo ${mindspore_line_info}|awk -F ' ' '{print $1}'`
-        accuracy_limit=`echo ${mindspore_line_info}|awk -F ' ' '{print $2}'`
+        model_name=`echo ${line}|awk -F ' ' '{print $1}'`
+        accuracy_limit=`echo ${line}|awk -F ' ' '{print $2}'`
         echo "mindspore run npu: ${model_name}, accuracy limit:${accuracy_limit}" >> "${run_arm64_log_file}"
         echo 'cd /data/local/tmp/benchmark_test' > adb_run_cmd.txt
         echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --device=NPU --modelFile='${model_name}'.ms --inDataFile=/data/local/tmp/input_output/input/'${model_name}'.ms.bin --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out --accuracyThreshold='${accuracy_limit} >> "${run_arm64_log_file}"