optimize lite test

sunsuodong 2021-08-18 04:42:24 +08:00
parent 6248117b1c
commit e4951c857a
23 changed files with 264 additions and 555 deletions

View File

@@ -230,6 +230,8 @@ if(PLATFORM_ARM64)
if(MSLITE_ENABLE_TESTCASES)
install(FILES ${TOP_DIR}/mindspore/lite/build/test/lite-test DESTINATION ${TEST_CASE_DIR}
COMPONENT ${RUNTIME_COMPONENT_NAME})
install(DIRECTORY ${TOP_DIR}/mindspore/lite/build/src/ DESTINATION ${TEST_CASE_DIR}
COMPONENT ${RUNTIME_COMPONENT_NAME} FILES_MATCHING PATTERN "*.so")
install(DIRECTORY ${TOP_DIR}/mindspore/lite/build/minddata/ DESTINATION ${TEST_CASE_DIR}
COMPONENT ${RUNTIME_COMPONENT_NAME} FILES_MATCHING PATTERN "*.so")
install(FILES ${JPEGTURBO_LIB_LIST} DESTINATION ${TEST_CASE_DIR})

View File

@@ -325,9 +325,7 @@ include(${TOP_DIR}/cmake/utils.cmake)
include(${TOP_DIR}/cmake/dependency_utils.cmake)
include(${TOP_DIR}/cmake/dependency_securec.cmake)
include(${TOP_DIR}/cmake/external_libs/flatbuffers.cmake)
if(NOT TARGET_HIMIX200 AND NOT TARGET_OHOS_LITE)
include(${TOP_DIR}/cmake/external_libs/openssl.cmake)
endif()
if(MSLITE_GPU_BACKEND STREQUAL opencl)
include(${TOP_DIR}/cmake/external_libs/opencl.cmake)
endif()
@@ -422,6 +420,8 @@ if(MSLITE_ENABLE_CONVERTER)
include_directories(${PYTHON_INCLUDE_DIRS})
include(${TOP_DIR}/cmake/external_libs/eigen.cmake)
include(${TOP_DIR}/cmake/external_libs/protobuf.cmake)
include(${TOP_DIR}/cmake/external_libs/glog.cmake)
include(${TOP_DIR}/cmake/external_libs/openssl.cmake)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/tools/converter)
endif()

View File

@@ -231,13 +231,13 @@ build_lite() {
compile_nnie_script=${BASEPATH}/mindspore/lite/tools/providers/NNIE/Hi3516D/compile_nnie.sh
cd ${BASEPATH}/../
if [[ "${local_lite_platform}" == "x86_64" ]]; then
sh ${compile_nnie_script} -I x86_64 -b nnie_3516_master_dev -j $THREAD_NUM
sh ${compile_nnie_script} -I x86_64 -b nnie_3516_master -j $THREAD_NUM
if [[ $? -ne 0 ]]; then
echo "compile x86_64 for nnie failed."
exit 1
fi
elif [[ "${local_lite_platform}" == "arm32" ]]; then
sh ${compile_nnie_script} -I arm32 -b nnie_3516_master_dev -j $THREAD_NUM
sh ${compile_nnie_script} -I arm32 -b nnie_3516_master -j $THREAD_NUM
if [[ $? -ne 0 ]]; then
echo "compile arm32 for nnie failed."
exit 1

View File

@@ -35,10 +35,6 @@ set(CODER_GENERATOR_SRC
${MICRO_DIR}/coder/generator/component/const_blocks/benchmark.cc
)
set(MINDSPORE_CORE
${TOP_DIR}/mindspore/core/gvar/logging_level.cc
)
set(CODER_OPCODERS_SRC
${MICRO_DIR}/coder/opcoders/file_collector.cc
${MICRO_DIR}/coder/opcoders/op_coder.cc
@@ -150,7 +146,7 @@ set(LITE_SRC
${LITE_DIR}/src/tensor.cc
${LITE_DIR}/src/weight_decoder.cc
${LITE_DIR}/src/huffman_decode.cc
${LITE_DIR}/src/common/log_adapter.cc
${LITE_DIR}/src/common/log.cc
${LITE_DIR}/src/common/utils.cc
${LITE_DIR}/../core/utils/status.cc
### tools
@@ -164,4 +160,4 @@ set(REGISTRY_SRC
)
list(APPEND FILE_SET ${CODER_SRC} ${CODER_OPCODERS_SRC} ${CODER_GENERATOR_SRC}
${CODER_ALLOCATOR_SRC} ${LITE_SRC} ${MINDSPORE_CORE} ${REGISTRY_SRC})
${CODER_ALLOCATOR_SRC} ${LITE_SRC} ${REGISTRY_SRC})

View File

@@ -1,7 +1,3 @@
if(NOT PLATFORM_ARM64 AND NOT PLATFORM_ARM32)
add_definitions(-DUSE_GLOG)
endif()
set(3RD_DIR ${TOP_DIR}/third_party)
set(LITE_DIR ${TOP_DIR}/mindspore/lite)
set(MICRO_DIR ${LITE_DIR}/micro)
@@ -35,7 +31,7 @@ if(MSLITE_ENABLE_CONVERTER)
add_executable(codegen main.cc ${FILE_SET})
add_dependencies(codegen fbs_src)
add_dependencies(codegen fbs_inner_src)
target_link_libraries(codegen PRIVATE ${SECUREC_LIBRARY} mindspore::glog wrapper_mid nnacl_mid cpu_ops_mid)
target_link_libraries(codegen PRIVATE ${SECUREC_LIBRARY} wrapper_mid nnacl_mid cpu_ops_mid)
if(ENABLE_MODEL_OBF)
target_link_libraries(codegen PRIVATE
${OBF_LIB_DIR}/libmsdeobfuscator-lite.so)

View File

@@ -20,7 +20,7 @@
#include "include/errorcode.h"
#include "include/api/format.h"
#include "include/registry/register_kernel_interface.h"
#include "utils/log_adapter.h"
#include "src/common/log_adapter.h"
using mindspore::kernel::KernelInterface;
using mindspore::lite::RET_ERROR;

View File

@@ -290,14 +290,14 @@ if(BUILD_MINDDATA STREQUAL "full")
add_library(minddata-lite SHARED
${MINDDATA_KERNELS_IMAGE_LITE_CV_FILES}
${CMAKE_CURRENT_SOURCE_DIR}/../src/common/log_adapter.cc
${CMAKE_CURRENT_SOURCE_DIR}/../src/common/log.cc
${CORE_DIR}/utils/ms_utils.cc
${MINDDATA_FULL_SRC}
)
add_library(minddata-lite_static STATIC
${MINDDATA_KERNELS_IMAGE_LITE_CV_FILES}
${CMAKE_CURRENT_SOURCE_DIR}/../src/common/log_adapter.cc
${CMAKE_CURRENT_SOURCE_DIR}/../src/common/log.cc
${CORE_DIR}/utils/ms_utils.cc
${MINDDATA_FULL_SRC}
)
@@ -436,7 +436,7 @@ elseif(BUILD_MINDDATA STREQUAL "wrapper")
add_library(minddata-lite SHARED
${MINDDATA_KERNELS_IMAGE_LITE_CV_FILES}
${CMAKE_CURRENT_SOURCE_DIR}/../src/common/log_adapter.cc
${CMAKE_CURRENT_SOURCE_DIR}/../src/common/log.cc
${CORE_DIR}/utils/ms_utils.cc
${MINDDATA_TODAPI_SRC}
${MINDSPORE_LITE_CXXAPI_SRC}
@@ -533,7 +533,7 @@ elseif(BUILD_MINDDATA STREQUAL "lite")
${MINDDATA_DIR}/util/memory_pool.cc
${MINDDATA_DIR}/util/path.cc
${MINDDATA_DIR}/api/transforms.cc
${CMAKE_CURRENT_SOURCE_DIR}/../src/common/log_adapter.cc
${CMAKE_CURRENT_SOURCE_DIR}/../src/common/log.cc
${CORE_DIR}/utils/ms_utils.cc
${MINDDATA_DIR}/kernels/ir/validators.cc
${MINDDATA_DIR}/kernels/ir/data/transforms_ir.cc

View File

@@ -87,7 +87,7 @@ set(LITE_SRC
${CMAKE_CURRENT_SOURCE_DIR}/common/config_file.cc
${CMAKE_CURRENT_SOURCE_DIR}/common/utils.cc
${CMAKE_CURRENT_SOURCE_DIR}/common/graph_util.cc
${CMAKE_CURRENT_SOURCE_DIR}/common/log_adapter.cc
${CMAKE_CURRENT_SOURCE_DIR}/common/log.cc
${CMAKE_CURRENT_SOURCE_DIR}/common/lite_utils.cc
${CMAKE_CURRENT_SOURCE_DIR}/common/prim_util.cc
${CMAKE_CURRENT_SOURCE_DIR}/common/tensor_util.cc
@@ -323,15 +323,16 @@ if(BUILD_MINDDATA STREQUAL "lite")
endif()
if(SUPPORT_TRAIN)
add_library(mindspore-lite-train SHARED ${TRAIN_SRC})
add_library(lite_train_src_mid OBJECT ${TRAIN_SRC})
add_dependencies(lite_train_src_mid fbs_src fbs_inner_src)
add_library(mindspore-lite-train SHARED $<TARGET_OBJECTS:lite_train_src_mid>)
set_target_properties(mindspore-lite-train PROPERTIES OUTPUT_NAME "mindspore-lite-train")
add_dependencies(mindspore-lite-train fbs_src fbs_inner_src)
set_target_properties(mindspore-lite-train PROPERTIES CLEAN_DIRECT_OUTPUT 1)
target_link_libraries(mindspore-lite-train minddata-lite mindspore-lite)
add_library(mindspore-lite-train_static STATIC ${TRAIN_SRC})
add_library(mindspore-lite-train_static STATIC $<TARGET_OBJECTS:lite_train_src_mid>)
set_target_properties(mindspore-lite-train_static PROPERTIES OUTPUT_NAME "mindspore-lite-train")
add_dependencies(mindspore-lite-train_static fbs_inner_src)
set_target_properties(mindspore-lite-train_static PROPERTIES CLEAN_DIRECT_OUTPUT 1)
target_link_libraries(mindspore-lite-train_static minddata-lite mindspore-lite)
endif()

View File

@@ -1,5 +1,5 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
* Copyright 2019-2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,8 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/log_adapter.h"
#include "src/common/log.h"
#include <cstring>
#include <cstdio>
@@ -60,16 +59,15 @@ bool IsPrint(int level) {
#ifdef ENABLE_ARM
#if defined(__ANDROID__) || defined(ANDROID)
// convert MsLogLevel to corresponding android level
static int GetAndroidLogLevel(MsLogLevel level) {
static int GetAndroidLogLevel(LiteLogLevel level) {
switch (level) {
case DEBUG:
case LiteLogLevel::DEBUG:
return ANDROID_LOG_DEBUG;
case INFO:
case LiteLogLevel::INFO:
return ANDROID_LOG_INFO;
case WARNING:
case LiteLogLevel::WARNING:
return ANDROID_LOG_WARN;
case ERROR:
case LiteLogLevel::ERROR:
default:
return ANDROID_LOG_ERROR;
}
@@ -77,22 +75,22 @@ static int GetAndroidLogLevel(MsLogLevel level) {
#endif
#endif
const char *EnumStrForMsLogLevel(MsLogLevel level) {
if (level == DEBUG) {
const char *EnumStrForMsLogLevel(LiteLogLevel level) {
if (level == LiteLogLevel::DEBUG) {
return "DEBUG";
} else if (level == INFO) {
} else if (level == LiteLogLevel::INFO) {
return "INFO";
} else if (level == WARNING) {
} else if (level == LiteLogLevel::WARNING) {
return "WARNING";
} else if (level == ERROR) {
} else if (level == LiteLogLevel::ERROR) {
return "ERROR";
} else {
return "NO_LEVEL";
}
}
void LogWriter::OutputLog(const std::ostringstream &msg) const {
if (IsPrint(log_level_)) {
void LiteLogWriter::OutputLog(const std::ostringstream &msg) const {
if (IsPrint(static_cast<int>(log_level_))) {
#if defined(ENABLE_ARM) && (defined(__ANDROID__) || defined(ANDROID))
__android_log_print(GetAndroidLogLevel(log_level_), ANDROID_LOG_TAG, "[%s:%d] %s] %s", location_.file_,
location_.line_, location_.func_, msg.str().c_str());
@@ -103,7 +101,7 @@ void LogWriter::OutputLog(const std::ostringstream &msg) const {
}
}
void LogWriter::operator<(const LogStream &stream) const noexcept {
void LiteLogWriter::operator<(const LiteLogStream &stream) const noexcept {
std::ostringstream msg;
msg << stream.sstream_->rdbuf();
OutputLog(msg);

View File

@@ -0,0 +1,109 @@
/**
* Copyright 2019-2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_SRC_COMMON_LOG_H_
#define MINDSPORE_LITE_SRC_COMMON_LOG_H_
#include <memory>
#include <sstream>
#include "utils/overload.h"
// NOTICE: when relative path of 'log.h' changed, macro 'LITE_LOG_HEAR_FILE_REL_PATH' must be changed
#define LITE_LOG_HEAR_FILE_REL_PATH "mindspore/lite/src/common/log.h"
// Get start index of file relative path in __FILE__
static constexpr size_t GetRealPathPos() noexcept {
return sizeof(__FILE__) > sizeof(LITE_LOG_HEAR_FILE_REL_PATH) ? sizeof(__FILE__) - sizeof(LITE_LOG_HEAR_FILE_REL_PATH)
: 0;
}
namespace mindspore {
#define LITE_FILE_NAME \
(sizeof(__FILE__) > GetRealPathPos() ? static_cast<const char *>(__FILE__) + GetRealPathPos() \
: static_cast<const char *>(__FILE__))
struct LiteLocationInfo {
LiteLocationInfo(const char *file, int line, const char *func) : file_(file), line_(line), func_(func) {}
~LiteLocationInfo() = default;
const char *file_;
int line_;
const char *func_;
};
class LiteLogStream {
public:
LiteLogStream() { sstream_ = std::make_shared<std::stringstream>(); }
~LiteLogStream() = default;
template <typename T>
LiteLogStream &operator<<(const T &val) noexcept {
(*sstream_) << val;
return *this;
}
LiteLogStream &operator<<(std::ostream &func(std::ostream &os)) noexcept {
(*sstream_) << func;
return *this;
}
friend class LiteLogWriter;
private:
std::shared_ptr<std::stringstream> sstream_;
};
enum class LiteLogLevel : int { DEBUG = 0, INFO, WARNING, ERROR };
class LiteLogWriter {
public:
LiteLogWriter(const LiteLocationInfo &location, mindspore::LiteLogLevel log_level)
: location_(location), log_level_(log_level) {}
~LiteLogWriter() = default;
#ifdef _WIN32
__declspec(dllexport) void operator<(const LiteLogStream &stream) const noexcept;
#else
__attribute__((visibility("default"))) void operator<(const LiteLogStream &stream) const noexcept;
#endif
private:
void OutputLog(const std::ostringstream &msg) const;
LiteLocationInfo location_;
LiteLogLevel log_level_;
};
#define MSLOG_IF(level) \
mindspore::LiteLogWriter(mindspore::LiteLocationInfo(LITE_FILE_NAME, __LINE__, __FUNCTION__), level) < \
mindspore::LiteLogStream()
#define MS_LOG(level) MS_LOG_##level
#define MS_LOG_DEBUG MSLOG_IF(mindspore::LiteLogLevel::DEBUG)
#define MS_LOG_INFO MSLOG_IF(mindspore::LiteLogLevel::INFO)
#define MS_LOG_WARNING MSLOG_IF(mindspore::LiteLogLevel::WARNING)
#define MS_LOG_ERROR MSLOG_IF(mindspore::LiteLogLevel::ERROR)
} // namespace mindspore
#ifdef Debug
#include <cassert>
#define MS_ASSERT(f) assert(f)
#else
#define MS_ASSERT(f) ((void)0)
#endif
#endif // MINDSPORE_LITE_SRC_COMMON_LOG_H_
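
The new src/common/log.h above replaces the inline logging implementation that used to live in log_adapter.h: MS_LOG(level) expands through MSLOG_IF to a LiteLogWriter joined to a LiteLogStream by operator<, and because operator< binds more loosely than operator<<, the whole message is collected in the stream before LiteLogWriter::operator< hands it to OutputLog together with the recorded file, line, and function. A minimal usage sketch follows; the function name and values are hypothetical and only illustrate the macro surface:

#include "src/common/log.h"

int CheckScatterOffset(int offset, int unit_size) {
  // Everything streamed here goes into LiteLogStream first; LiteLogWriter
  // then prints it with the captured location, so no std::endl is needed.
  MS_LOG(DEBUG) << "offset " << offset << ", unit_size " << unit_size;
  if (unit_size <= 0) {
    MS_LOG(ERROR) << "invalid unit_size " << unit_size;
    return -1;
  }
  MS_ASSERT(offset >= 0);  // expands to assert(...) only when Debug is defined
  return 0;
}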

View File

@@ -1,5 +1,5 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
* Copyright 2019-2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,9 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_SRC_COMMON_LOG_ADAPTER_H_
#define MINDSPORE_LITE_SRC_COMMON_LOG_ADAPTER_H_
#ifdef USE_GLOG
#include "utils/log_adapter.h"
#else
#include "src/common/log.h"
#endif // USE_GLOG
namespace mindspore {
const char *const unsupport_string_tensor_log =
"This mindspore-lite library does not support string tensors. Set environment variable MSLITE_STRING_KERNEL to on to "
@@ -46,101 +52,5 @@ const char *const unsupport_fp16_log =
"MSLITE_ENABLE_FP16 to on to "
"recompile it.";
} // namespace mindspore
#ifdef USE_GLOG
#include "utils/log_adapter.h"
#else
#include <cstdarg>
#include <cstdint>
#include <functional>
#include <memory>
#include <sstream>
#include <string>
#include "utils/overload.h"
// NOTICE: when relative path of 'log_adapter.h' changed, macro 'LOG_HEAR_FILE_REL_PATH' must be changed
#define LOG_HEAR_FILE_REL_PATH "mindspore/lite/src/common/log_adapter.h"
// Get start index of file relative path in __FILE__
static constexpr size_t GetRealPathPos() noexcept {
return sizeof(__FILE__) > sizeof(LOG_HEAR_FILE_REL_PATH) ? sizeof(__FILE__) - sizeof(LOG_HEAR_FILE_REL_PATH) : 0;
}
namespace mindspore {
#define FILE_NAME \
(sizeof(__FILE__) > GetRealPathPos() ? static_cast<const char *>(__FILE__) + GetRealPathPos() \
: static_cast<const char *>(__FILE__))
struct LocationInfo {
LocationInfo(const char *file, int line, const char *func) : file_(file), line_(line), func_(func) {}
~LocationInfo() = default;
const char *file_;
int line_;
const char *func_;
};
class LogStream {
public:
LogStream() { sstream_ = std::make_shared<std::stringstream>(); }
~LogStream() = default;
template <typename T>
LogStream &operator<<(const T &val) noexcept {
(*sstream_) << val;
return *this;
}
LogStream &operator<<(std::ostream &func(std::ostream &os)) noexcept {
(*sstream_) << func;
return *this;
}
friend class LogWriter;
private:
std::shared_ptr<std::stringstream> sstream_;
};
enum MsLogLevel : int { DEBUG = 0, INFO, WARNING, ERROR };
const char *EnumStrForMsLogLevel(MsLogLevel level);
class LogWriter {
public:
LogWriter(const LocationInfo &location, MsLogLevel log_level) : location_(location), log_level_(log_level) {}
~LogWriter() = default;
#ifdef _WIN32
__declspec(dllexport) void operator<(const LogStream &stream) const noexcept;
#else
__attribute__((visibility("default"))) void operator<(const LogStream &stream) const noexcept;
#endif
private:
void OutputLog(const std::ostringstream &msg) const;
LocationInfo location_;
MsLogLevel log_level_;
};
#define MSLOG_IF(level) \
mindspore::LogWriter(mindspore::LocationInfo(FILE_NAME, __LINE__, __FUNCTION__), level) < mindspore::LogStream()
#define MS_LOG(level) MS_LOG_##level
#define MS_LOG_DEBUG MSLOG_IF(mindspore::DEBUG)
#define MS_LOG_INFO MSLOG_IF(mindspore::INFO)
#define MS_LOG_WARNING MSLOG_IF(mindspore::WARNING)
#define MS_LOG_ERROR MSLOG_IF(mindspore::ERROR)
} // namespace mindspore
#ifdef Debug
#include <cassert>
#define MS_ASSERT(f) assert(f)
#else
#define MS_ASSERT(f) ((void)0)
#endif
#endif // USE_GLOG
#endif // MINDSPORE_LITE_SRC_COMMON_LOG_ADAPTER_H_

View File

@@ -274,13 +274,7 @@ int OpenCLRuntime::Init() {
if (gpu_info_.type == MALI || gpu_info_.type == MALI_T || gpu_info_.type == MALI_G) {
clImportMemoryARM = reinterpret_cast<clImportMemoryARMFunc>(dlsym(handle_, "clImportMemoryARM"));
if (clImportMemoryARM == nullptr) {
mindspore::LogWriter(mindspore::LocationInfo((sizeof("_file_name_") > GetRealPathPos()
? static_cast<const char *>("_file_name_") + GetRealPathPos()
: static_cast<const char *>("_file_name_")),
105, "_function_name_"),
mindspore::ERROR) < mindspore::LogStream() << "load func ("
<< "clImportMemoryARM"
<< ") failed!";
MS_LOG(ERROR) << "load func (clImportMemoryARM) failed!";
UnLoadOpenCLLibrary(handle_);
return false;
}

View File

@@ -135,7 +135,7 @@ int ScatterNDCPUKernel::ScatterND(int task_id) {
return RET_OK;
}
int offset = task_id * thread_n_stride_;
MS_LOG(ERROR) << "offset " << offset << std::endl;
MS_LOG(ERROR) << "offset " << offset;
auto ret = DoScatterND(output_ptr_, update_ptr_ + offset * unit_size_, output_unit_offsets_.data() + offset,
unit_size_, num_unit_thread);
if (ret != RET_OK) {

View File

@@ -1,428 +1,118 @@
set(TOP_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../..)
set(TEST_DIR ${TOP_DIR}/mindspore/lite/test)
set(LITE_DIR ${TOP_DIR}/mindspore/lite)
set(CCSRC_DIR ${TOP_DIR}/mindspore/ccsrc)
set(CONVERTER_DIR ${TOP_DIR}/mindspore/lite/tools/converter)
include_directories(${TOP_DIR})
include_directories(${TEST_DIR})
include_directories(${TOP_DIR}/mindspore/ccsrc/backend/kernel_compiler/cpu)
include(${CMAKE_CURRENT_SOURCE_DIR}/../../../cmake/external_libs/gtest.cmake)
STRING(REPLACE " -fvisibility=hidden " " -fvisibility=default " CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
STRING(REPLACE " -fvisibility=hidden " " -fvisibility=default " CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
include(${TOP_DIR}/cmake/external_libs/gtest.cmake)
if(MSLITE_ENABLE_CONVERTER)
set(CCSRC_SRC
## ccsrc
${CCSRC_DIR}/backend/optimizer/common/pattern_engine.cc
${CCSRC_DIR}/backend/optimizer/common/visit.cc
${CCSRC_DIR}/backend/optimizer/common/optimizer.cc
)
else()
set(TEST_LITE_SRC ${LITE_DIR}/src/common/log_adapter.cc)
add_compile_definitions(USE_ANDROID_LOG)
if(MSLITE_ENABLE_V0)
add_definitions(-DENABLE_V0)
endif()
### cpu kernel
file(GLOB KERNEL_OP_SRC
${LITE_DIR}/src/runtime/kernel/arm/base/*.cc
${LITE_DIR}/src/runtime/kernel/arm/fp32/*.cc
${LITE_DIR}/src/runtime/kernel/arm/int8/*.cc
${LITE_DIR}/src/runtime/kernel/arm/string/*.cc
file(GLOB_RECURSE TEST_UT_SRC
${TEST_DIR}/main.cc
${TEST_DIR}/common/common_test.cc
${TEST_DIR}/ut/src/infer_test.cc
${TEST_DIR}/ut/src/utils_test.cc
${TEST_DIR}/ut/src/scheduler_test.cc
${TEST_DIR}/ut/src/registry/registry_test.cc
${TEST_DIR}/ut/src/registry/registry_custom_op_test.cc
${TEST_DIR}/ut/src/runtime/runtime_pass_tests.cc
${TEST_DIR}/st/multiple_device_test.cc
${TEST_DIR}/st/mindrt_parallel_runtime_test.cc
${TEST_DIR}/st/mix_data_type_test.cc
${TEST_DIR}/ut/nnacl/infer/*.cc
${TEST_DIR}/ut/src/runtime/kernel/arm/common/*.cc
${TEST_DIR}/ut/src/runtime/kernel/arm/fp32/*.cc
${TEST_DIR}/ut/src/runtime/kernel/arm/int8/*.cc
${TEST_DIR}/ut/src/runtime/kernel/arm/string/*.cc
)
if(NOT PLATFORM_ARM AND "${CMAKE_BUILD_TYPE}" STREQUAL "Release")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fomit-frame-pointer -fstrict-aliasing -ffunction-sections \
-fdata-sections -ffast-math")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fomit-frame-pointer -fstrict-aliasing -ffunction-sections \
-fdata-sections -ffast-math")
if(MSLITE_ENABLE_TRAIN)
file(GLOB_RECURSE TEST_TRAIN_UT_SRC
${TEST_DIR}/ut/src/runtime/kernel/arm/fp32_grad/*.cc
${TEST_DIR}/ut/src/runtime/kernel/arm/cxx_api/*.cc
)
list(APPEND TEST_UT_SRC ${TEST_TRAIN_UT_SRC})
endif()
file(GLOB KERNEL_OP_TRAIN_SRC
${LITE_DIR}/src/runtime/kernel/arm/fp32_grad/*.cc
if(MSLITE_GPU_BACKEND STREQUAL opencl)
file(GLOB_RECURSE TEST_GPU_UT_SRC
${TEST_DIR}/ut/src/runtime/kernel/opencl/*.cc
)
file(GLOB FP16_KERNEL_OP_SRC
${LITE_DIR}/src/runtime/kernel/arm/fp16/*.cc
)
if(SUPPORT_TRAIN)
list(APPEND KERNEL_OP_SRC ${KERNEL_OP_TRAIN_SRC})
list(APPEND TEST_UT_SRC ${TEST_GPU_UT_SRC})
endif()
if(MSLITE_ENABLE_FP16)
list(APPEND KERNEL_OP_SRC ${FP16_KERNEL_OP_SRC})
file(GLOB_RECURSE TEST_FP16_UT_SRC
${TEST_DIR}/ut/src/runtime/kernel/arm/fp16/*.cc
)
list(APPEND TEST_UT_SRC ${TEST_FP16_UT_SRC})
if(SUPPORT_TRAIN)
file(GLOB_RECURSE TEST_TRAIN_FP16_SRC
${TEST_DIR}/ut/src/runtime/kernel/arm/fp16_grad/*.cc)
list(APPEND TEST_UT_SRC ${TEST_TRAIN_FP16_SRC})
endif()
### gpu kernel
if(MSLITE_GPU_BACKEND STREQUAL opencl)
file(GLOB GPU_KERNEL_OP_SRC
${LITE_DIR}/src/runtime/kernel/opencl/kernel/*.cc
)
set(KERNEL_OP_SRC
${KERNEL_OP_SRC}
${GPU_KERNEL_OP_SRC}
${LITE_DIR}/src/runtime/kernel/opencl/opencl_kernel.cc
${LITE_DIR}/src/runtime/kernel/opencl/opencl_subgraph.cc
${LITE_DIR}/src/runtime/kernel/opencl/opencl_fusion.cc
${LITE_DIR}/src/runtime/kernel/opencl/utils.cc
)
endif()
### runtime framework
add_definitions(-DENABLE_V0)
file(GLOB_RECURSE OPS_SRC ${LITE_DIR}/src/ops/*.cc)
file(GLOB CXX_SRC
${LITE_DIR}/src/cxx_api/*.cc
${LITE_DIR}/src/cxx_api/graph/*.cc
${LITE_DIR}/src/cxx_api/model/*.cc
${LITE_DIR}/src/cxx_api/tensor/*.cc
)
file(GLOB CXX_API_TRAIN_SRCS
${LITE_DIR}/src/cxx_api/train/*.cc
${LITE_DIR}/src/cxx_api/metrics/*.cc
${LITE_DIR}/src/cxx_api/callback/*.cc
)
file(GLOB TRAIN_SRCS
${LITE_DIR}/src/train/*.cc
)
if(MSLITE_ENABLE_CONVERTER)
set(OPS_SRC ${OPS_SRC})
endif()
set(TEST_LITE_SRC
${TEST_LITE_SRC}
${CCSRC_SRC}
${OPS_SRC}
${CXX_SRC}
${KERNEL_OP_SRC}
${LITE_DIR}/src/runtime/inner_allocator.cc
${LITE_DIR}/src/runtime/infer_manager.cc
${LITE_DIR}/src/runtime/runtime_pass.cc
${LITE_DIR}/src/tensor.cc
${LITE_DIR}/src/ms_tensor.cc
${LITE_DIR}/src/tensorlist.cc
${LITE_DIR}/src/executor.cc
${LITE_DIR}/src/inner_context.cc
${LITE_DIR}/src/kernel_registry.cc
${LITE_DIR}/src/inner_kernel.cc
${LITE_DIR}/src/lite_kernel.cc
${LITE_DIR}/src/lite_kernel_util.cc
${LITE_DIR}/src/lite_session.cc
${LITE_DIR}/src/weight_decoder.cc
${LITE_DIR}/src/huffman_decode.cc
${LITE_DIR}/src/sub_graph_kernel.cc
${LITE_DIR}/src/sub_graph_split.cc
${LITE_DIR}/src/lite_model.cc
${LITE_DIR}/src/scheduler.cc
${LITE_DIR}/src/common/graph_util.cc
${LITE_DIR}/src/common/prim_util.cc
${LITE_DIR}/src/common/tensor_util.cc
${LITE_DIR}/src/common/context_util.cc
${LITE_DIR}/src/common/file_utils.cc
${LITE_DIR}/src/common/config_file.cc
${LITE_DIR}/src/common/utils.cc
${LITE_DIR}/src/common/dynamic_library_loader.cc
${LITE_DIR}/src/common/string_util.cc
${LITE_DIR}/src/common/lite_utils.cc
${LITE_DIR}/src/common/quant_utils.cc
${LITE_DIR}/src/delegate/delegate.cc
${LITE_DIR}/src/errorcode.cc
${LITE_DIR}/src/cpu_info.cc
${LITE_DIR}/tools/common/flag_parser.cc
${LITE_DIR}/src/train/train_populate_parameter.cc
${LITE_DIR}/src/train/train_populate_parameter_v0.cc
)
# Avoid multiple definitions
if(MSLITE_ENABLE_CONVERTER STREQUAL "off")
set(TEST_LITE_SRC
${TEST_LITE_SRC}
${LITE_DIR}/tools/converter/quantizer/fse_decoder.cc
${LITE_DIR}/tools/converter/quantizer/fse_bit_stream.cc
)
endif()
file(GLOB KERNEL_REG_SRC ${LITE_DIR}/src/registry/*.cc)
set(TEST_LITE_SRC ${TEST_LITE_SRC} ${KERNEL_REG_SRC})
if(MSLITE_ENABLE_TOOLS)
list(APPEND TEST_UT_SRC ${TEST_DIR}/st/benchmark_test.cc)
set(TEST_LITE_SRC
${TEST_LITE_SRC}
${LITE_DIR}/tools/benchmark/run_benchmark.cc
${LITE_DIR}/tools/benchmark/benchmark_base.cc
${LITE_DIR}/tools/benchmark/benchmark_unified_api.cc
${LITE_DIR}/tools/benchmark/benchmark.cc
${LITE_DIR}/test/st/benchmark_test.cc
)
if(NOT MSLITE_ENABLE_CONVERTER)
list(APPEND TEST_LITE_SRC ${LITE_DIR}/tools/common/flag_parser.cc)
endif()
endif()
### gpu runtime
if(MSLITE_GPU_BACKEND STREQUAL opencl)
include_directories(${CMAKE_BINARY_DIR}/_deps/opencl-headers-src)
include_directories(${CMAKE_BINARY_DIR}/_deps/opencl-clhpp-src/include)
file(GLOB_RECURSE OPENCL_RUNTIME_SRC
${LITE_DIR}/src/runtime/gpu/opencl/*.cc
)
set(TEST_LITE_SRC
${TEST_LITE_SRC}
${OPENCL_RUNTIME_SRC}
)
endif()
if(MSLITE_ENABLE_MINDRT)
include_directories(${CORE_DIR}/mindrt/)
include_directories(${CORE_DIR}/mindrt/src/)
set(TEST_LITE_SRC ${TEST_LITE_SRC}
${LITE_DIR}/src/lite_mindrt.cc
${LITE_DIR}/src/mindrt_executor.cc
${CORE_DIR}/mindrt/src/mindrt.cc
${CORE_DIR}/mindrt/src/actor/actor.cc
${CORE_DIR}/mindrt/src/actor/actormgr.cc
${CORE_DIR}/mindrt/src/actor/actorpolicy.cc
${CORE_DIR}/mindrt/src/actor/aid.cc
${CORE_DIR}/mindrt/src/async/future.cc
${CORE_DIR}/mindrt/src/async/uuid_base.cc
${CORE_DIR}/mindrt/src/async/uuid_generator.cc
${CORE_DIR}/mindrt/src/thread/threadpool.cc
${CORE_DIR}/mindrt/src/thread/core_affinity.cc
${CORE_DIR}/mindrt/src/thread/actor_threadpool.cc
)
endif()
### converter
if(MSLITE_ENABLE_CONVERTER)
add_definitions(-DPRIMITIVE_WRITEABLE)
add_definitions(-DUSE_GLOG)
file(GLOB_RECURSE TEST_CASE_TFLITE_PARSERS_SRC
file(GLOB_RECURSE TEST_CONVERTER_UT_SRC
${TEST_DIR}/ut/tools/converter/registry/*.cc
${TEST_DIR}/ut/tools/converter/parser/tflite/*.cc
)
set(TEST_LITE_SRC
${TEST_LITE_SRC}
${TEST_CASE_TFLITE_PARSERS_SRC}
${LITE_DIR}/tools/converter/ops/while.cc
${LITE_DIR}/tools/common/protobuf_utils.cc
${LITE_DIR}/tools/converter/optimizer.cc
${LITE_DIR}/tools/converter/anf_transform.cc
${LITE_DIR}/tools/converter/graphdef_transform.cc
${LITE_DIR}/tools/converter/converter_flags.cc
${LITE_DIR}/tools/converter/converter.cc
${LITE_DIR}/tools/converter/export_model.cc
${LITE_DIR}/tools/converter/optimizer_manager.cc
${LITE_DIR}/tools/converter/parser/parser_utils.cc
${LITE_DIR}/tools/optimizer/common/node_pass_extends.cc
${LITE_DIR}/tools/optimizer/common/pass_manager_extends.cc
${LITE_DIR}/tools/optimizer/common/gllo_utils.cc
${LITE_DIR}/tools/optimizer/common/format_utils.cc
${LITE_DIR}/tools/optimizer/common/multiple_pattern_process_pass.cc
${LITE_DIR}/tools/optimizer/format/delete_redundant_transpose.cc
${LITE_DIR}/tools/optimizer/format/to_format_base.cc
${LITE_DIR}/tools/optimizer/format/to_nchw_format.cc
${LITE_DIR}/tools/optimizer/format/to_nhwc_format.cc
${LITE_DIR}/tools/optimizer/fusion/affine_activation_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/affine_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/conv_biasadd_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/conv_activation_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/conv_tuple_activation_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/conv_transform_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/conv_scale_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/conv_bn_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/multi_head_attention_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/reshape_reshape_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/constant_folding_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/norm_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/batchmatmul_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/sigmoid_mul_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/conv_conv_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/conv_pad_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/conv_tuplegetitem_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/tflite_lstm_cell_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/tf_lstm_cell_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/tf_bidirection_gru_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/matmul_add_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/mul_add_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/tflite_rel_pos_multi_head_attention_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/glu_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/gelu_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/tf_gelu_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/onnx_gelu_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/squeeze_fusion.cc
${LITE_DIR}/tools/optimizer/fusion/transpose_fusion.cc
${LITE_DIR}/tools/optimizer/graph/add_tensor_array.cc
${LITE_DIR}/tools/optimizer/graph/conv1d_weight_expanding_pass.cc
${LITE_DIR}/tools/optimizer/graph/clip_convert_activation_pass.cc
${LITE_DIR}/tools/optimizer/graph/group_depthwise_op_convert_pass.cc
${LITE_DIR}/tools/optimizer/graph/update_conv2d_param_pass.cc
${LITE_DIR}/tools/optimizer/graph/unused_cast_node_remove_pass.cc
${LITE_DIR}/tools/optimizer/graph/unused_transpose_node_remove_pass.cc
${LITE_DIR}/tools/optimizer/graph/redundant_op_remove_pass.cc
${LITE_DIR}/tools/optimizer/graph/infershape_pass.cc
${LITE_DIR}/tools/optimizer/graph/slice_prepose_pass.cc
${LITE_DIR}/tools/optimizer/graph/control_flow_pass.cc
${LITE_DIR}/tools/optimizer/graph/decrease_transpose_algo.cc
${LITE_DIR}/tools/optimizer/graph/node_infershape.cc
${LITE_DIR}/tools/optimizer/graph/transpose_strategy.cc
${LITE_DIR}/tools/optimizer/graph/reduce_same_act_pass.cc
${LITE_DIR}/tools/optimizer/graph/split_one_pass.cc
${LITE_DIR}/tools/optimizer/graph/specify_graph_input_format.cc
${LITE_DIR}/tools/optimizer/fisson/eliminate_concat_split.cc
${LITE_DIR}/tools/optimizer/fisson/fisson_util.cc
${LITE_DIR}/tools/optimizer/fisson/iter_node_outputs.cc
${LITE_DIR}/tools/optimizer/fisson/node_out_shapes.cc
${LITE_DIR}/tools/optimizer/fisson/multi_conv_split_pass.cc
${LITE_DIR}/tools/optimizer/parallel/multi_node_split.cc
${LITE_DIR}/tools/optimizer/parallel/multi_conv_info.cc
${LITE_DIR}/tools/optimizer/parallel/parallel_pass.cc
${LITE_DIR}/tools/optimizer/parallel/operator_info.cc
${LITE_DIR}/tools/optimizer/parallel/operator_info_register.cc
${LITE_DIR}/tools/optimizer/parallel/conv2d_info.cc
${LITE_DIR}/tools/optimizer/parallel/depthwise_conv2d_info.cc
${LITE_DIR}/tools/optimizer/parallel/spliter.cc
${LITE_DIR}/tools/optimizer/parallel/split_strategy.cc
${LITE_DIR}/tools/common/graph_util.cc
${LITE_DIR}/tools/common/tensor_util.cc
${LITE_DIR}/tools/common/node_util.cc
${LITE_DIR}/tools/common/storage.cc
${LITE_DIR}/tools/common/parse_config_utils.cc
${LITE_DIR}/tools/common/string_util.cc
${LITE_DIR}/tools/converter/parser/inputs_adjust.cc
${LITE_DIR}/tools/converter/parser/unify_format.cc
${LITE_DIR}/tools/converter/parser/lstm_adjust_pass.cc
${LITE_DIR}/tools/converter/parser/unused_node_remove_pass.cc
${LITE_DIR}/tools/converter/parser/conv1d_inout_adjust.cc
${LITE_DIR}/tools/converter/parser/tf_bidirection_gru_cf_fusion.cc
${LITE_DIR}/tools/converter/import/mindspore_importer.cc
${LITE_DIR}/tools/converter/import/primitive_adjust.cc
${LITE_DIR}/tools/converter/import/mindir_adjust.cc
${LITE_DIR}/tools/converter/import/mindir_control_flow_adjust.cc
)
endif()
### train
if(SUPPORT_TRAIN)
set(TEST_LITE_SRC
${TEST_LITE_SRC}
${CXX_API_TRAIN_SRCS}
${TRAIN_SRCS}
${LITE_DIR}/tools/common/storage.cc
)
endif()
### test src
file(GLOB_RECURSE TEST_CASE_KERNEL_SRC
${TEST_DIR}/ut/src/runtime/kernel/arm/common/*.cc
${TEST_DIR}/ut/src/runtime/kernel/arm/fp32/*.cc
${TEST_DIR}/ut/src/runtime/kernel/arm/int8/*.cc
${TEST_DIR}/ut/src/runtime/kernel/arm/string/*.cc
${TEST_DIR}/ut/src/runtime/runtime_pass_tests.cc
${TEST_DIR}/ut/nnacl/infer/*.cc
)
file(GLOB_RECURSE TEST_CASE_KERNEL_TRAIN_SRC
${TEST_DIR}/ut/src/runtime/kernel/arm/fp32_grad/*.cc
${TEST_DIR}/ut/src/runtime/kernel/arm/cxx_api/*.cc
)
set(TEST_SRC
${TEST_LITE_SRC}
${TEST_MINDDATA_SRC}
${TEST_CASE_KERNEL_SRC}
${TEST_DIR}/main.cc
${TEST_DIR}/common/common_test.cc
${TEST_DIR}/st/mix_data_type_test.cc
${TEST_DIR}/ut/src/infer_test.cc
${TEST_DIR}/ut/src/utils_test.cc
${TEST_DIR}/ut/src/dynamic_library_loader_test.cc
${TEST_DIR}/ut/src/scheduler_test.cc
${TEST_DIR}/ut/src/lite_mindrt_test.cc
${TEST_DIR}/ut/src/registry/registry_test.cc
${TEST_DIR}/ut/src/registry/registry_custom_op_test.cc
${TEST_DIR}/st/multiple_device_test.cc
${TEST_DIR}/st/mindrt_parallel_runtime_test.cc
${TEST_DIR}/st/mix_data_type_test.cc
)
if(MSLITE_ENABLE_CONVERTER)
set(TEST_SRC
${TEST_SRC}
${TEST_DIR}/st/converter_test.cc
${TEST_DIR}/st/mindrt_parallel_test.cc
${TEST_DIR}/st/graph_test.cc
${TEST_DIR}/st/sub_graph_test.cc
${TEST_DIR}/common/import_from_meta_graphT.cc
${TEST_DIR}/ut/src/dynamic_library_loader_test.cc
${TEST_DIR}/ut/tools/optimizer/fusion/conv_biasadd_fusion_test.cc
${TEST_DIR}/ut/tools/optimizer/fusion/conv_bn_fusion_test.cc
${TEST_DIR}/ut/tools/optimizer/fusion/conv_scale_fusion_test.cc
${TEST_DIR}/ut/tools/optimizer/fusion/conv_activation_fusion_test.cc
${TEST_DIR}/ut/tools/optimizer/fusion/constant_folding_fusion_test.cc
)
else()
set(TEST_SRC
${TEST_SRC}
${CORE_DIR}/utils/status.cc)
endif()
list(APPEND TEST_UT_SRC ${TEST_CONVERTER_UT_SRC})
if(SUPPORT_TRAIN)
set(TEST_SRC
${TEST_SRC}
${TEST_CASE_KERNEL_TRAIN_SRC}
set(TEST_LITE_SRC
${TEST_LITE_SRC}
${TEST_DIR}/common/import_from_meta_graphT.cc
${LITE_DIR}/src/ops/ops_utils.cc
${LITE_DIR}/src/ops/ops_def.cc
)
endif()
if(MSLITE_GPU_BACKEND STREQUAL opencl)
file(GLOB_RECURSE TEST_CASE_KERNEL_GPU_SRC
${TEST_DIR}/ut/src/runtime/kernel/opencl/*.cc
)
set(TEST_SRC
${TEST_SRC}
${TEST_CASE_KERNEL_GPU_SRC}
)
endif()
if(MSLITE_ENABLE_FP16)
file(GLOB_RECURSE TEST_CASE_KERNEL_FP16_SRC
${TEST_DIR}/ut/src/runtime/kernel/arm/fp16/*.cc
)
file(GLOB_RECURSE KERNEL_FP16_SRC
${LITE_DIR}/src/runtime/kernel/arm/fp16/*.cc
)
set(TEST_SRC
${TEST_SRC}
${KERNEL_FP16_SRC}
${TEST_CASE_KERNEL_FP16_SRC}
${TEST_UT_SRC}
${TEST_LITE_SRC}
)
endif()
if(MSLITE_ENABLE_FP16 AND SUPPORT_TRAIN)
file(GLOB_RECURSE TEST_CASE_KERNEL_FP16_SRC_GRAD
${TEST_DIR}/ut/src/runtime/kernel/arm/fp16_grad/*.cc)
list(APPEND TEST_SRC ${TEST_CASE_KERNEL_FP16_SRC_GRAD})
endif()
file(GLOB_RECURSE API_SRC ${LITE_DIR}/src/cxx_api/*.cc)
set(TEST_SRC ${TEST_SRC} ${API_SRC})
add_executable(lite-test ${TEST_SRC})
add_dependencies(lite-test fbs_src)
add_dependencies(lite-test fbs_src fbs_inner_src)
if(SUPPORT_TRAIN)
add_dependencies(lite-test fbs_inner_src)
target_link_libraries(lite-test minddata-lite)
endif()
target_link_libraries(lite-test mindspore-lite dl mindspore::gtest)
target_link_libraries(lite-test
nnacl_mid
dl
mindspore::gtest
)
if(PLATFORM_ARM AND MSLITE_ENABLE_FP16)
target_link_libraries(lite-test nnacl_fp16_mid)
if(PLATFORM_ARM64)
target_link_libraries(lite-test nnacl_optimize_mid)
endif()
if(MSLITE_ENABLE_TRAIN)
target_link_libraries(lite-test mindspore-lite-train minddata-lite)
endif()
if(PLATFORM_ARM)
@@ -431,13 +121,20 @@ else()
target_link_libraries(lite-test ${SECUREC_LIBRARY} pthread)
endif()
if(SUPPORT_NPU)
include_directories(${DDK_PATH})
include_directories(${DDK_INCLUDE_PATH})
target_link_libraries(lite-test npu_kernel_mid c++_shared)
if(MSLITE_ENABLE_NPU)
target_link_libraries(lite-test c++_shared)
endif()
if(MSLITE_ENABLE_MINDRT)
add_library(mindrt_test_mid OBJECT ${TEST_DIR}/ut/src/lite_mindrt_test.cc)
add_dependencies(mindrt_test_mid fbs_src fbs_inner_src)
if("${CMAKE_BUILD_TYPE}" STREQUAL "Release")
target_compile_options(mindrt_test_mid PRIVATE -fno-rtti)
endif()
target_link_libraries(lite-test mindrt_test_mid)
endif()
if(MSLITE_ENABLE_CONVERTER)
add_dependencies(lite-test fbs_inner_src)
target_link_libraries(lite-test
anf_exporter_mid
mslite_converter_plugin
@@ -449,6 +146,8 @@ if(MSLITE_ENABLE_CONVERTER)
fusion_mid
quantizer_mid
proto_mid
ccsrc_src_mid
converter_src_mid
mindspore::protobuf
mindspore::eigen
mindspore::json
@@ -458,6 +157,7 @@ if(MSLITE_ENABLE_CONVERTER)
config_parser_mid
)
endif()
if(ENABLE_MODEL_OBF)
target_link_libraries(lite-test ${OBF_LIB_DIR}/libmsdeobfuscator-lite.so)
endif()

View File

@@ -38,15 +38,9 @@ class CommonTest : public testing::Test {
template <typename T>
void PrintData(const std::string &name, T *output_data, int size) {
std::cout << "The " << name << " is as follows:" << std::endl;
if (typeid(output_data[0]) == typeid(uint8_t) || typeid(output_data[0]) == typeid(int8_t)) {
for (int i = 0; i < std::min(size, 100); i++) {
std::cout << static_cast<int>(output_data[i]) << " ";
}
} else {
for (int i = 0; i < std::min(size, 100); i++) {
std::cout << output_data[i] << " ";
}
}
std::cout << std::endl;
}

View File

@@ -16,14 +16,14 @@
#include <iostream>
#include "gtest/gtest.h"
#ifndef USE_ANDROID_LOG
#ifdef USE_GLOG
namespace mindspore {
extern void InitSubModulesLogLevel();
}
#endif
GTEST_API_ int main(int argc, char **argv) {
#ifndef USE_ANDROID_LOG
#ifdef USE_GLOG
mindspore::InitSubModulesLogLevel();
#endif
testing::InitGoogleTest(&argc, argv);

View File

@@ -14,7 +14,7 @@ cd ${CUR_DIR}/do_test
cp ${BUILD_DIR}/test/lite-test ./
cp ${BUILD_DIR}/googletest/googlemock/gtest/libgtest.so ./
ls -l *.so*
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./
export LD_LIBRARY_PATH=./:${TENSORRT_PATH}/lib:${CUDA_HOME}/lib64:${LD_LIBRARY_PATH}
cp -r ${CUR_DIR}/ut/src/runtime/kernel/arm/test_data/* ./
cp -r ${CUR_DIR}/ut/tools/converter/parser/tflite/test_data/* ./

View File

@@ -6,7 +6,7 @@ set(CCSRC_SRC
${CCSRC_DIR}/backend/optimizer/common/visit.cc
${CCSRC_DIR}/backend/optimizer/common/optimizer.cc
)
include(${TOP_DIR}/cmake/external_libs/glog.cmake)
include_directories(${TOP_DIR}/mindspore/ccsrc/backend/kernel_compiler/cpu)
if(NOT WIN32)
@@ -15,7 +15,6 @@ endif()
file(GLOB_RECURSE CONVERTER_SRC RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_CURRENT_SOURCE_DIR}/ops/*.cc
${CMAKE_CURRENT_SOURCE_DIR}/../flag/flag_parser.cc
${CMAKE_CURRENT_SOURCE_DIR}/converter.cc
${CMAKE_CURRENT_SOURCE_DIR}/converter_flags.cc
${CMAKE_CURRENT_SOURCE_DIR}/anf_transform.cc
@@ -152,6 +151,7 @@ set(LITE_SRC
${SRC_DIR}/common/graph_util.cc
${SRC_DIR}/common/string_util.cc
${SRC_DIR}/common/lite_utils.cc
${SRC_DIR}/common/log.cc
${SRC_DIR}/common/prim_util.cc
${SRC_DIR}/common/tensor_util.cc
${SRC_DIR}/runtime/inner_allocator.cc
@@ -196,16 +196,21 @@ set_property(SOURCE ${CONVERTER_SRC} PROPERTY COMPILE_DEFINITIONS SUBMODULE_ID=m
set_property(SOURCE ${CCSRC_SRC} PROPERTY COMPILE_DEFINITIONS SUBMODULE_ID=mindspore::SubModuleId::SM_LITE)
set_property(SOURCE ${LITE_SRC} PROPERTY COMPILE_DEFINITIONS SUBMODULE_ID=mindspore::SubModuleId::SM_LITE)
add_library(converter_src_mid OBJECT ${CONVERTER_SRC})
add_dependencies(converter_src_mid fbs_src fbs_inner_src)
add_library(ccsrc_src_mid OBJECT ${CCSRC_SRC})
add_dependencies(ccsrc_src_mid fbs_src fbs_inner_src)
add_executable(converter_lite
main.cc
${CCSRC_SRC}
${CONVERTER_SRC}
${LITE_SRC}
)
add_dependencies(converter_lite fbs_src)
add_dependencies(converter_lite fbs_inner_src)
add_dependencies(converter_lite fbs_src fbs_inner_src)
target_link_libraries(converter_lite PRIVATE
ccsrc_src_mid
converter_src_mid
mindrt_mid
cpu_ops_mid
nnacl_mid

View File

@@ -23,7 +23,9 @@
#include "tools/common/storage.h"
#include "tools/anf_exporter/anf_exporter.h"
#include "include/version.h"
#ifdef SUPPORT_TRAIN
#include "src/train/train_populate_parameter.h"
#endif
#include "include/registry/model_parser_registry.h"
#include "src/common/dynamic_library_loader.h"
#include "src/common/log_util.h"
@@ -43,7 +45,9 @@ void InitConverterParameters(const converter::Flags &flag, converter::ConverterP
FuncGraphPtr Converter::BuildFuncGraph(const converter::Flags &flag) {
FuncGraphPtr func_graph = nullptr;
if (flag.fmk == converter::FmkType::kFmkTypeMs) {
#ifdef SUPPORT_TRAIN
kernel::PopulateTrainParameters();
#endif
MindsporeImporter ms_import;
func_graph = ms_import.ImportMindIR(flag);
if (func_graph == nullptr) {

View File

@@ -3,7 +3,7 @@ set(COMMON_SRC
${CMAKE_CURRENT_SOURCE_DIR}/../common/flag_parser.cc
${CMAKE_CURRENT_SOURCE_DIR}/../../src/common/file_utils.cc
${CMAKE_CURRENT_SOURCE_DIR}/../../src/common/utils.cc
${CMAKE_CURRENT_SOURCE_DIR}/../../src/common/log_adapter.cc
${CMAKE_CURRENT_SOURCE_DIR}/../../src/common/log.cc
${CMAKE_CURRENT_SOURCE_DIR}/../../src/errorcode.cc
)

File diff suppressed because one or more lines are too long

View File

@@ -526,7 +526,7 @@ abstract::AbstractBasePtr NodeInferShape::ConvertLiteTensorToAbstract(lite::Tens
// both of them is different in term of shape and type.
abstract::AbstractBasePtr NodeInferShape::ConvertTensorListToAbstract(lite::Tensor *tensor) {
MS_ASSERT(tensor != nullptr);
auto tensor_list = dynamic_cast<lite::TensorList *>(tensor);
auto tensor_list = reinterpret_cast<lite::TensorList *>(tensor);
if (tensor_list == nullptr) {
MS_LOG(ERROR) << "cast tensor_list failed";
return nullptr;

View File

@@ -5,7 +5,7 @@ set(COMMON_SRC
${CMAKE_CURRENT_SOURCE_DIR}/../common/flag_parser.cc
${CMAKE_CURRENT_SOURCE_DIR}/../../src/common/file_utils.cc
${CMAKE_CURRENT_SOURCE_DIR}/../../src/common/utils.cc
${CMAKE_CURRENT_SOURCE_DIR}/../../src/common/log_adapter.cc
${CMAKE_CURRENT_SOURCE_DIR}/../../src/common/log.cc
${CMAKE_CURRENT_SOURCE_DIR}/../../src/ops/ops_def.cc
)
add_executable(schema_gen