!31427 [MS][LITE][DEVELOP]support GLOG

Merge pull request !31427 from chenjianping/master_dev
This commit is contained in:
i-robot 2022-03-21 09:23:53 +00:00 committed by Gitee
commit 4f29a71464
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
12 changed files with 63 additions and 27 deletions

View File

@ -239,6 +239,10 @@ if(PLATFORM_ARM64)
if(ANDROID_NDK_TOOLCHAIN_INCLUDED OR MSLITE_ENABLE_CONVERTER)
__install_micro_wrapper()
endif()
if(MSLITE_ENABLE_RUNTIME_GLOG)
install(FILES ${glog_LIBPATH}/libglog.so.0.4.0 DESTINATION ${GLOG_DIR} RENAME libglog.so.0
COMPONENT ${RUNTIME_COMPONENT_NAME})
endif()
if(MSLITE_ENABLE_TOOLS)
if(NOT MSLITE_COMPILE_TWICE)
install(TARGETS ${BENCHMARK_NAME} RUNTIME DESTINATION ${BENCHMARK_ROOT_DIR}
@ -636,6 +640,10 @@ else()
install(FILES ${TOP_DIR}/mindspore/lite/tools/obfuscator/lib/linux-x64/libmsdeobfuscator-lite.so
DESTINATION ${RUNTIME_LIB_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
endif()
if(MSLITE_ENABLE_RUNTIME_GLOG)
install(FILES ${glog_LIBPATH}/libglog.so.0.4.0 DESTINATION ${GLOG_DIR} RENAME libglog.so.0
COMPONENT ${RUNTIME_COMPONENT_NAME})
endif()
if(MSLITE_ENABLE_CONVERTER)
install(DIRECTORY ${TOP_DIR}/mindspore/lite/include/ DESTINATION ${CONVERTER_ROOT_DIR}/include
COMPONENT ${RUNTIME_COMPONENT_NAME} FILES_MATCHING PATTERN "*.h"

View File

@ -23,6 +23,7 @@
#include <map>
#include <iomanip>
#include <thread>
#include <vector>
#include "utils/convert_utils_base.h"
// namespace to support utils module definition
@ -164,7 +165,7 @@ void LogWriter::operator<(const LogStream &stream) const noexcept {
msg << stream.sstream_->rdbuf();
OutputLog(msg);
}
#ifndef BUILD_LITE_INFERENCE
void LogWriter::operator^(const LogStream &stream) const {
std::ostringstream msg;
msg << stream.sstream_->rdbuf();
@ -189,6 +190,7 @@ void LogWriter::operator^(const LogStream &stream) const {
}
throw std::runtime_error(oss.str());
}
#endif
static inline std::string GetEnv(const std::string &envvar) {
const char *value = std::getenv(envvar.c_str());
@ -547,11 +549,17 @@ MS_CORE_API void common_log_init(void) {
FLAGS_logtostderr = true;
} else if (logtostderr == "0") {
if (mindspore::GetEnv("GLOG_log_dir").empty()) {
#ifndef BUILD_LITE_INFERENCE
MS_LOG(ERROR) << "`GLOG_log_dir` is empty, it must be set while 'logtostderr' equals to 0.";
// Here can not throw exception and use python to catch, because the PYBIND11_MODULE is not yet been initialed.
exit(EXIT_FAILURE);
#else
MS_LOG(WARNING) << "`GLOG_log_dir` is empty, log will be printed to stderr.";
FLAGS_logtostderr = true;
#endif
} else {
// Set log dir from GLOG_log_dir with RANK_ID or OMPI_COMM_WORLD_RANK.
#ifndef BUILD_LITE_INFERENCE
const std::string rank_id = mindspore::GetEnv("RANK_ID");
const std::string gpu_rank_id = mindspore::GetEnv("OMPI_COMM_WORLD_RANK");
std::string rank = "0";
@ -561,6 +569,9 @@ MS_CORE_API void common_log_init(void) {
rank = gpu_rank_id;
}
FLAGS_log_dir = mindspore::GetEnv("GLOG_log_dir") + "/rank_" + rank + "/logs";
#else
FLAGS_log_dir = mindspore::GetEnv("GLOG_log_dir");
#endif
}
}

View File

@ -146,7 +146,11 @@ enum SubModuleId : int {
};
// Default sub-module id used by MS_LOG call sites that do not define their own:
// full MindSpore builds tag messages as SM_ME, while lite-inference builds
// (BUILD_LITE_INFERENCE defined) tag them as SM_LITE.
#ifndef SUBMODULE_ID
#ifndef BUILD_LITE_INFERENCE
#define SUBMODULE_ID mindspore::SubModuleId::SM_ME
#else
#define SUBMODULE_ID mindspore::SubModuleId::SM_LITE
#endif
#endif
/// \brief Get sub-module name by the module id.
@ -204,10 +208,12 @@ class MS_CORE_API LogWriter {
/// \param[in] stream The input log stream.
void operator<(const LogStream &stream) const noexcept;
#ifndef BUILD_LITE_INFERENCE
/// \brief Output log message from the input log stream and then throw exception.
///
/// \param[in] stream The input log stream.
void operator^(const LogStream &stream) const __attribute__((noreturn));
#endif
static void set_exception_handler(const ExceptionHandler &exception_handler);
static void set_trace_provider(const TraceProvider &trace_provider);
@ -230,10 +236,16 @@ class MS_CORE_API LogWriter {
: mindspore::LogWriter(mindspore::LocationInfo(FILE_NAME, __LINE__, __FUNCTION__), level, SUBMODULE_ID, \
excp_type) < mindspore::LogStream()
#ifndef BUILD_LITE_INFERENCE
// Full builds: MSLOG_THROW logs at EXCEPTION level through operator^, which
// formats the message and then throws (declared __attribute__((noreturn))).
#define MSLOG_THROW(excp_type) \
mindspore::LogWriter(mindspore::LocationInfo(FILE_NAME, __LINE__, __FUNCTION__), mindspore::EXCEPTION, SUBMODULE_ID, \
excp_type) ^ \
mindspore::LogStream()
#else
// Lite-inference builds: operator^ is compiled out, so the same macro degrades
// to an ERROR-level log via the non-throwing operator< and execution continues.
#define MSLOG_THROW(excp_type) \
mindspore::LogWriter(mindspore::LocationInfo(FILE_NAME, __LINE__, __FUNCTION__), mindspore::ERROR, SUBMODULE_ID, \
excp_type) < mindspore::LogStream()
#endif
inline bool IS_OUTPUT_ON(enum MsLogLevel level) noexcept(true) {
return (static_cast<int>(level) >= mindspore::g_ms_submodule_log_levels[SUBMODULE_ID] &&

View File

@ -153,11 +153,6 @@ if(DEFINED ENV{MSLITE_ENABLE_SHARING_MEM_WITH_OPENGL})
set(MSLITE_ENABLE_SHARING_MEM_WITH_OPENGL $ENV{MSLITE_ENABLE_SHARING_MEM_WITH_OPENGL})
endif()
option(MSLITE_ENABLE_DYNAMIC_THREAD_DISTRIBUTE "enable distribute thread dynamically" off)
option(MSLITE_ENABLE_BFC_MEMORY "enable distribute BFC memory" off)
option(MSLITE_ENABLE_PARALLEL_INFERENCE "enable parallel inference interface" off)
option(MSLITE_ENABLE_SHARING_MODEL_WEIGHT "enable sharing model weight" off)
if(DEFINED ENV{MSLITE_ENABLE_SERVER_INFERENCE})
set(MSLITE_ENABLE_SERVER_INFERENCE $ENV{MSLITE_ENABLE_SERVER_INFERENCE})
set(MSLITE_ENABLE_DYNAMIC_THREAD_DISTRIBUTE on)
@ -193,6 +188,9 @@ endif()
if(MSLITE_ENABLE_SHARING_MODEL_WEIGHT)
add_compile_definitions(SHARING_MODEL_WEIGHT)
endif()
if(DEFINED ENV{MSLITE_ENABLE_SERVING})
set(MSLITE_ENABLE_SERVING $ENV{MSLITE_ENABLE_SERVING})
endif()
if(MACHINE_LINUX_ARM64)
add_compile_definitions(MACHINE_LINUX_ARM64)
@ -216,9 +214,6 @@ elseif(TOOLCHAIN_NAME STREQUAL "ohos-lite")
set(TARGET_OHOS_LITE on)
SET_PROPERTY(GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS TRUE)
endif()
if(MSLITE_ENABLE_SERVER_INFERENCE)
add_compile_definitions(SERVER_INFERENCE)
endif()
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.3.0
AND NOT TARGET_HIMIX AND NOT TARGET_MIX210)
@ -279,6 +274,12 @@ else()
set(MSLITE_ENABLE_NPU off)
endif()
if(DEFINED ENV{MSLITE_ENABLE_SERVER_INFERENCE})
set(MSLITE_ENABLE_SERVER_INFERENCE $ENV{MSLITE_ENABLE_SERVER_INFERENCE})
set(MSLITE_ENABLE_RUNTIME_GLOG off)
add_compile_definitions(SERVER_INFERENCE)
endif()
if(MSLITE_ENABLE_SSE OR MSLITE_ENABLE_AVX OR MSLITE_ENABLE_AVX512 OR WIN32)
set(MSLITE_ENABLE_RUNTIME_CONVERT off)
endif()

View File

@ -5,7 +5,7 @@ endif()
include_directories(${CCSRC_DIR}/plugin/device/cpu/kernel)
set(TOOLS_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../tools)
add_compile_definitions(BUILD_LITE_INFERENCE)
if(NOT MSLITE_ENABLE_STRING_KERNEL)
add_compile_definitions(STRING_KERNEL_CLIP)
endif()
@ -30,7 +30,6 @@ endif()
if(NOT MSLITE_ENABLE_INT8)
add_compile_definitions(OP_INT8_CLIP)
endif()
if(APPLE OR PLATFORM_ARM32 OR PLATFORM_ARM64)
#for performance
if("${CMAKE_BUILD_TYPE}" STREQUAL "Release" AND NOT APPLE)
@ -179,7 +178,6 @@ if(BUILD_EXPERIMENT)
endif()
if(MSLITE_ENABLE_RUNTIME_GLOG)
add_definitions(-DPRIMITIVE_WRITEABLE)
add_definitions(-DUSE_GLOG)
string(REPLACE "-fno-rtti" "" CMAKE_C_FLAGS ${CMAKE_C_FLAGS})
string(REPLACE "-fno-rtti" "" CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
@ -191,7 +189,7 @@ endif()
if(MSLITE_ENABLE_RUNTIME_CONVERT)
add_definitions(-DRUNTIME_CONVERT)
add_definitions(-DPRIMITIVE_WRITEABLE)
file(GLOB RUNTIME_CONVERT_SRC
${CMAKE_CURRENT_SOURCE_DIR}/ops/ops_def.cc
${CMAKE_CURRENT_SOURCE_DIR}/ops/ops_utils.cc

View File

@ -15,7 +15,7 @@
*/
#include "include/api/model_parallel_runner.h"
#include "src/cxx_api/model_pool/model_pool.h"
#include "src/common/log.h"
#include "src/common/log_adapter.h"
namespace mindspore {
Status ModelParallelRunner::Init(const std::string &model_path, const std::shared_ptr<RunnerConfig> &runner_config) {

View File

@ -16,7 +16,7 @@
#include "src/cxx_api/model_pool/model_pool.h"
#include <unistd.h>
#include <future>
#include "src/common/log.h"
#include "src/common/log_adapter.h"
#include "include/lite_types.h"
#include "src/common/config_file.h"
#include "src/runtime/inner_allocator.h"

View File

@ -14,7 +14,7 @@
* limitations under the License.
*/
#include "src/cxx_api/model_pool/model_worker.h"
#include "src/common/log.h"
#include "src/common/log_adapter.h"
#include "src/common/utils.h"
#include "src/common/common.h"
namespace mindspore {

View File

@ -63,7 +63,7 @@
namespace mindspore {
#ifdef USE_GLOG
// Forward declarations of the logging initializers defined in the core log
// module; declared extern "C" so they can be resolved without C++ mangling.
extern "C" {
extern void common_log_init();
extern void mindspore_log_init();
}
#endif
namespace lite {
@ -137,7 +137,13 @@ bool ExistCustomCpuKernel() {
// Constructor: bootstrap glog-based logging (when enabled) before any session work.
LiteSession::LiteSession() {
#ifdef USE_GLOG
// Shared initializer; reads GLOG_* environment settings (e.g. GLOG_log_dir).
mindspore::common_log_init();
#if defined(_WIN32) || defined(_WIN64) || defined(__APPLE__)
#ifdef _MSC_VER
// Windows/MSVC additionally needs the mindspore-specific init hook.
// NOTE(review): on Apple (_MSC_VER never set) this call is skipped — presumably intentional, confirm.
mindspore::mindspore_log_init();
#endif
#else
// All other platforms always run the mindspore-specific init hook.
mindspore::mindspore_log_init();
#endif
#endif
// Session starts in the not-running state.
this->is_running_.store(false);
}

View File

@ -33,7 +33,7 @@ function Run_server_inference_avx512() {
cd ${path}/server || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${path}/server/mindspore-lite-${version}-linux-x64 || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib:./runtime/third_party/glog
cp tools/benchmark/benchmark ./ || exit 1
# $1:cfgFileList; $2:modelPath; $3:dataPath; $4:logFile; $5:resultFile; $6:platform; $7:processor; $8:phoneId; $9:benchmark_mode
Run_Benchmark "${models_server_inference_cfg_file_list[*]}" $ms_models_path $models_path $run_server_inference_x86_log_file $run_benchmark_result_file 'x86_avx512' 'CPU' '' $run_fail_not_return
@ -49,7 +49,7 @@ function Run_server_inference_arm64() {
cd ${path}/server || exit 1
tar -zxf mindspore-lite-${version}-linux-aarch64.tar.gz || exit 1
cd ${path}/server/mindspore-lite-${version}-linux-aarch64 || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib:./runtime/third_party/glog
cp tools/benchmark/benchmark ./ || exit 1
# $1:cfgFileList; $2:modelPath; $3:dataPath; $4:logFile; $5:resultFile; $6:platform; $7:processor; $8:phoneId; $9:benchmark_mode
Run_Benchmark "${models_server_inference_cfg_file_list[*]}" $ms_models_path $models_path $run_server_inference_arm64_log_file $run_benchmark_result_file 'x86_avx512' 'CPU' '' $run_fail_not_return

View File

@ -28,7 +28,7 @@ function Run_TensorRT_Mpirun() {
tar -zxf ${x86_path}/tensorrt/mindspore-lite-${version}-linux-x64.tar.gz || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/tensorrt/mindspore-lite-${version}-linux-x64/ || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib:./runtime/third_party/glog
cp tools/benchmark/benchmark ./ || exit 1
echo "start mpirun models..."
@ -91,7 +91,7 @@ function Run_TensorRT() {
tar -zxf ${x86_path}/tensorrt/mindspore-lite-${version}-linux-x64.tar.gz || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/tensorrt/mindspore-lite-${version}-linux-x64/ || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib:./runtime/third_party/glog
cp tools/benchmark/benchmark ./ || exit 1
local line_info model_info spec_acc_limit model_name input_num input_shapes \

View File

@ -69,7 +69,7 @@ function Run_x86() {
# $1:framework;
echo 'cd '${x86_path}'/mindspore-lite-'${version}'-linux-*' >> "${run_x86_log_file}"
cd ${x86_path}/mindspore-lite-${version}-linux-*/ || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib:./runtime/third_party/glog
cp tools/benchmark/benchmark ./ || exit 1
# Run converted models:
# $1:cfgFileList; $2:modelPath; $3:dataPath; $4:logFile; $5:resultFile; $6:platform; $7:processor; $8:phoneId;
@ -81,7 +81,7 @@ function Run_x86_sse() {
cd ${x86_path}/sse || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/sse/mindspore-lite-${version}-linux-x64 || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib:./runtime/third_party/glog
cp tools/benchmark/benchmark ./ || exit 1
# Run converted models:
@ -94,7 +94,7 @@ function Run_x86_avx() {
cd ${x86_path}/avx || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/avx/mindspore-lite-${version}-linux-x64 || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib:./runtime/third_party/glog
cp tools/benchmark/benchmark ./ || exit 1
# Run converted models:
@ -107,7 +107,7 @@ function Run_x86_avx512() {
cd ${x86_path}/avx512 || exit 1
tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
cd ${x86_path}/avx512/mindspore-lite-${version}-linux-x64 || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./runtime/lib:./runtime/third_party/glog
cp tools/benchmark/benchmark ./ || exit 1
# Run converted models:
@ -159,7 +159,7 @@ function Run_x86_parallel_split() {
rm -rf parallel_split
mkdir parallel_split
cd parallel_split || exit 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../runtime/lib
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../runtime/lib:./runtime/third_party/glog
cp ../tools/benchmark/benchmark ./ || exit 1
# Run tflite parallel split converted models: