forked from mindspore-Ecosystem/mindspore
!4908 Modify the '-fvisibility' attribute during building of mindspore-lite.
Merge pull request !4908 from wangshaocong/lite_clean
This commit is contained in:
commit 6d0bbb36a3

@@ -7,7 +7,6 @@ set(OPENCV_DIR ${MAIN_DIR}/third_party/opencv)
set(PROTOBF_DIR ${MAIN_DIR}/third_party/protobuf)
set(FLATBF_DIR ${MAIN_DIR}/third_party/flatbuffers)

install(FILES ${TOP_DIR}/mindspore/lite/build/src/libmindspore-lite.so DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
if (BUILD_MINDDATA)
install(DIRECTORY ${TOP_DIR}/mindspore/ccsrc/minddata/dataset/include/ DESTINATION ${INC_DIR} COMPONENT ${COMPONENT_NAME} FILES_MATCHING PATTERN "*.h")
install(FILES ${TOP_DIR}/mindspore/lite/build/minddata/libminddata-lite.so DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})

@@ -30,24 +29,30 @@ if (BUILD_MINDDATA)
endif ()

if (PLATFORM_ARM64)
install(FILES ${TOP_DIR}/mindspore/lite/build/src/libmindspore-lite.so DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/mindspore/core/ir/dtype/type_id.h DESTINATION ${INC_DIR}/ir/dtype COMPONENT ${COMPONENT_NAME})
install(DIRECTORY ${TOP_DIR}/mindspore/lite/include/ DESTINATION ${INC_DIR} COMPONENT ${COMPONENT_NAME} FILES_MATCHING PATTERN "*.h")
install(DIRECTORY ${TOP_DIR}/mindspore/lite/schema/ DESTINATION ${INC_DIR}/schema COMPONENT ${COMPONENT_NAME} FILES_MATCHING PATTERN "*.h" PATTERN "inner" EXCLUDE)
install(FILES ${TOP_DIR}/mindspore/lite/build/nnacl/liboptimize.so DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
install(DIRECTORY ${TOP_DIR}/third_party/flatbuffers/include DESTINATION ${FLATBF_DIR} COMPONENT ${COMPONENT_NAME})
elseif (PLATFORM_ARM32)
install(FILES ${TOP_DIR}/mindspore/lite/build/src/libmindspore-lite.so DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/mindspore/core/ir/dtype/type_id.h DESTINATION ${INC_DIR}/ir/dtype COMPONENT ${COMPONENT_NAME})
install(DIRECTORY ${TOP_DIR}/mindspore/lite/include/ DESTINATION ${INC_DIR} COMPONENT ${COMPONENT_NAME} FILES_MATCHING PATTERN "*.h")
install(DIRECTORY ${TOP_DIR}/mindspore/lite/schema/ DESTINATION ${INC_DIR}/schema COMPONENT ${COMPONENT_NAME} FILES_MATCHING PATTERN "*.h" PATTERN "inner" EXCLUDE)
install(DIRECTORY ${TOP_DIR}/third_party/flatbuffers/include DESTINATION ${FLATBF_DIR} COMPONENT ${COMPONENT_NAME})
else ()
install(FILES ${TOP_DIR}/mindspore/lite/build/src/libmindspore-lite.so DESTINATION ${LIB_DIR} COMPONENT ${RUN_X86_COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/protobuf/build/lib/libprotobuf.so.19.0.0 DESTINATION ${PROTOBF_DIR}/lib RENAME libprotobuf.so.19 COMPONENT ${COMPONENT_NAME})

endif ()

set(CPACK_GENERATOR TGZ)
set(CPACK_ARCHIVE_COMPONENT_INSTALL ON)
set(CPACK_COMPONENTS_ALL ${COMPONENT_NAME})
if (PLATFORM_ARM64 OR PLATFORM_ARM32)
set(CPACK_COMPONENTS_ALL ${COMPONENT_NAME})
else ()
set(CPACK_COMPONENTS_ALL ${COMPONENT_NAME} ${RUN_X86_COMPONENT_NAME})
endif ()
set(CPACK_PACKAGE_FILE_NAME ${MAIN_DIR})
set(CPACK_PACKAGE_DIRECTORY ${TOP_DIR}/output/tmp)
set(CPACK_PACKAGE_CHECKSUM SHA256)

@@ -28,6 +28,7 @@ elseif (PLATFORM_ARM32)
else ()
set(COMPONENT_NAME convert-ubuntu)
endif()
set(RUN_X86_COMPONENT_NAME runtime-x86-${PROCESS_UNIT})

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17")
set(TOP_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../..)

@@ -77,6 +78,8 @@ if (NOT PLATFORM_ARM64 AND NOT PLATFORM_ARM32)
set(CMAKE_SHARED_LINKER_FLAGS "-Wl,-z,relro,-z,now -Wl,-z,noexecstack ${CMAKE_SHARED_LINKER_FLAGS}")
set(CMAKE_EXE_LINKER_FLAGS "-Wl,-z,relro,-z,now -Wl,-z,noexecstack ${CMAKE_EXE_LINKER_FLAGS}")
string(REPLACE " -g " " " CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fvisibility=hidden")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=hidden")
endif ()
endif ()

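The two -fvisibility=hidden flags added here are what the merge title refers to: in non-ARM builds of libmindspore-lite.so, every symbol is now hidden unless its declaration opts back in. A minimal C++ sketch of the effect (not part of the diff), assuming a hypothetical MS_API export macro that is not taken from the patch:

// visibility_sketch.cc -- illustrative only; compile with:
//   g++ -std=c++17 -fvisibility=hidden -fPIC -shared visibility_sketch.cc -o libsketch.so
#define MS_API __attribute__((visibility("default")))

namespace sketch {

int InternalHelper() { return 42; }   // hidden under -fvisibility=hidden

MS_API int PublicEntry() {            // explicitly exported from the .so
  return InternalHelper();
}

}  // namespace sketch

Inspecting the result with nm -D --defined-only libsketch.so should list only the (name-mangled) PublicEntry symbol, which is the behaviour the flag change gives the release library.
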
@@ -2,6 +2,42 @@ project(nnacl)

set(NNACL_DIR ${CMAKE_CURRENT_SOURCE_DIR})
set(TOP_DIR ${CMAKE_CURRENT_SOURCE_DIR}/..)
set(SRC_DIR ${TOP_DIR}/src)
set(LITE_SRC
${SRC_DIR}/common/graph_util.cc
${SRC_DIR}/common/ms_tensor_utils.cc
${SRC_DIR}/runtime/allocator.cc
${SRC_DIR}/runtime/runtime_api.cc
${SRC_DIR}/runtime/thread_pool.cc
${SRC_DIR}/runtime/workspace_pool.cc
${SRC_DIR}/ir/tensor.cc
${SRC_DIR}/context.cc
${SRC_DIR}/executor.cc
${SRC_DIR}/kernel_registry.cc
${SRC_DIR}/lite_kernel.cc
${SRC_DIR}/populate_parameter.cc
${SRC_DIR}/scheduler.cc
${SRC_DIR}/lite_session.cc
${SRC_DIR}/model.cc
)

if (SUPPORT_GPU)
set(LITE_SRC
${LITE_SRC}
${SRC_DIR}/runtime/kernel/opencl/subgraph_opencl_kernel.cc
${SRC_DIR}/runtime/kernel/opencl/utils.cc
${SRC_DIR}/runtime/opencl/opencl_executor.cc
${SRC_DIR}/runtime/opencl/opencl_allocator.cc
${SRC_DIR}/runtime/opencl/opencl_runtime.cc
${SRC_DIR}/runtime/opencl/opencl_wrapper.cc
)
endif ()

set(ANF_SRC
${ANF_SRC}
${SRC_DIR}/ir/meta_tensor_extends.cc
)
file(GLOB_RECURSE C_OPS_SRC ${SRC_DIR}/ops/*.cc)
include_directories(NNACL_DIR)

########################### optimized files ###########################

@@ -25,11 +61,11 @@ if (PLATFORM_ARM64)
string(REPLACE "-fvisibility=hidden" "-fvisibility=default" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv8.2-a+dotprod+fp16")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=armv8.2-a+dotprod+fp16")
add_library(optimize SHARED ${OPTIMIZED_OPS})
target_link_libraries(
optimize
mindspore-lite
)
add_library(optimize SHARED ${OPTIMIZED_OPS} ${LITE_SRC} ${ANF_SRC} ${C_OPS_SRC})
add_library(cpu_kernel STATIC $<TARGET_OBJECTS:cpu_kernel_mid_>)
target_link_libraries(optimize cpu_kernel)
find_library(log-lib log)
target_link_libraries(optimize ${log-lib})
set_target_properties(optimize PROPERTIES CLEAN_DIRECT_OUTPUT 1)

add_custom_command(TARGET optimize POST_BUILD

@@ -18,7 +18,7 @@
#include "nnacl/fp16/reduce_fp16.h"
#include "nnacl/errorcode.h"

int ReduceMean(const int outer_size, const int inner_size, const int axis_size, const float16_t *src_data,
int ReduceMeanFp16(const int outer_size, const int inner_size, const int axis_size, const float16_t *src_data,
const int *src_shape, float16_t *dst_data, const int tid, const int thread_num) {
if (src_data == NULL || src_shape == NULL || dst_data == NULL) {
return NNACL_NULL_PTR;

@@ -25,7 +25,7 @@
#ifdef __cplusplus
extern "C" {
#endif
int ReduceMean(const int outer_size, const int inner_size, const int axis_size, const float16_t *src_data,
int ReduceMeanFp16(const int outer_size, const int inner_size, const int axis_size, const float16_t *src_data,
const int *src_shape, float16_t *dst_data, const int tid, const int thread_num);

#ifdef __cplusplus

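The fp16 reduce function is declared inside extern "C", where overloading does not exist, so it cannot share a name with any other C symbol called ReduceMean; renaming it to ReduceMeanFp16 keeps the symbol unique (the clash it avoids is an assumption, not stated in the patch). A small sketch of the pattern (not part of the diff), with simplified signatures rather than the real nnacl prototypes:

#include <cstdio>

// The two kernels must coexist as plain C symbols, so they need distinct names:
// extern "C" functions cannot be overloaded.
extern "C" int ReduceMean(const float *src, float *dst, int n);
extern "C" int ReduceMeanFp16(const unsigned short *src, unsigned short *dst, int n);

extern "C" int ReduceMean(const float *src, float *dst, int n) {
  float sum = 0.0f;
  for (int i = 0; i < n; ++i) sum += src[i];
  *dst = (n > 0) ? sum / n : 0.0f;
  return 0;
}

extern "C" int ReduceMeanFp16(const unsigned short *, unsigned short *, int) {
  return 0;  // placeholder body; the real kernel works on float16_t data
}

int main() {
  const float in[4] = {1.0f, 2.0f, 3.0f, 4.0f};
  float out = 0.0f;
  ReduceMean(in, &out, 4);
  std::printf("mean = %.2f\n", out);  // mean = 2.50
  return 0;
}
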
@@ -268,7 +268,7 @@ int ReduceMinLastAxis(const int outer_size, const int inner_size, const int axis
RoundingDivideByPOT(SaturatingRoundingDoublingHighMul(
(tmp - quant->in_zp_) * (1 << ((unsigned int)quant->in_out_left_shift_ + base_offset)),
quant->in_out_multiplier_),
quant->in_out_right_shift_ + base_offset);
quant->in_out_right_shift_ + base_offset);
if (isAddOverflow(tmp_scaled, quant->out_zp_)) {
return NNACL_ERRCODE_ADD_OVERFLOW;
}

@@ -333,7 +333,7 @@ int ArithmeticFP16CPUKernel::DoArithmetic(int task_id) {
return RET_OK;
}

int ArithmeticsRun(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int ArithmeticsRun(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto arithmetic_kernel = reinterpret_cast<ArithmeticFP16CPUKernel *>(cdata);
auto error_code = arithmetic_kernel->DoArithmetic(task_id);
if (error_code != RET_OK) {

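This hunk and the similar kernel hunks below all make the same change: the free function handed to the thread pool gains static, giving it internal linkage. A plausible motivation, given that these sources are now also compiled into the optimize library above, is that each callback stays local to its own translation unit, so identically named callbacks in other files (or a second copy of the same file) cannot produce duplicate or exported symbols. A minimal sketch of the pattern (not part of the diff), with invented class and function names:

#include <iostream>

// Stand-in for the real CPU kernel class; only the per-task entry point matters here.
class FakeArithmeticKernel {
 public:
  int DoArithmetic(int task_id) {
    std::cout << "running task " << task_id << '\n';
    return 0;  // RET_OK
  }
};

// 'static' gives the trampoline internal linkage: it is invisible outside this
// translation unit, so another kernel file defining its own ArithmeticsRun cannot
// collide with it at link time, and it never appears in the shared library's exports.
static int ArithmeticsRun(int task_id, void *cdata) {
  auto *kernel = reinterpret_cast<FakeArithmeticKernel *>(cdata);
  return kernel->DoArithmetic(task_id);
}

int main() {
  FakeArithmeticKernel kernel;
  return ArithmeticsRun(0, &kernel);  // the thread pool would normally invoke this per task
}
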
@@ -170,7 +170,7 @@ int Convolution1x1FP16CPUKernel::RunImpl(int task_id) {
return RET_OK;
}

int Convolution1x1Fp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int Convolution1x1Fp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto conv = reinterpret_cast<Convolution1x1FP16CPUKernel *>(cdata);
auto error_code = conv->RunImpl(task_id);
if (error_code != RET_OK) {

@@ -197,7 +197,7 @@ int Convolution3x3FP16CPUKernel::RunImpl(int task_id) {
return RET_OK;
}

int Convolution3x3Fp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int Convolution3x3Fp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto conv = reinterpret_cast<Convolution3x3FP16CPUKernel *>(cdata);
auto error_code = conv->RunImpl(task_id);
if (error_code != RET_OK) {

@@ -150,7 +150,7 @@ int ConvolutionDepthwiseFp16CPUKernel::Execute(int task_id) {
return RET_OK;
}

int ConvDwFp16Run(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int ConvDwFp16Run(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto conv_dw_fp16 = reinterpret_cast<ConvolutionDepthwiseFp16CPUKernel *>(cdata);
auto ret = conv_dw_fp16->Execute(task_id);
if (ret != RET_OK) {

@@ -177,7 +177,7 @@ int ConvolutionFP16CPUKernel::RunImpl(int task_id) {
return RET_OK;
}

int ConvolutionFp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int ConvolutionFp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto conv = reinterpret_cast<ConvolutionFP16CPUKernel *>(cdata);
auto error_code = conv->RunImpl(task_id);
if (error_code != RET_OK) {

@@ -186,7 +186,7 @@ int ConvolutionSWFP16CPUKernel::RunImpl(int task_id) {
return RET_OK;
}

int ConvolutionSWFp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int ConvolutionSWFp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto conv = reinterpret_cast<ConvolutionSWFP16CPUKernel *>(cdata);
auto error_code = conv->RunImpl(task_id);
if (error_code != RET_OK) {

@@ -348,7 +348,7 @@ int ConvolutionWinogradFP16CPUKernel::RunImpl(int task_id) {
return RET_OK;
}

int ConvolutionWinogradFp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int ConvolutionWinogradFp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto conv = reinterpret_cast<ConvolutionWinogradFP16CPUKernel *>(cdata);
auto error_code = conv->RunImpl(task_id);
if (error_code != RET_OK) {

@@ -157,7 +157,7 @@ int DeconvolutionDepthwiseFp16CPUKernel::Execute(int task_id) {
return RET_OK;
}

int DeconvDwFp16Run(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int DeconvDwFp16Run(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto deconv_dw_fp16 = reinterpret_cast<DeconvolutionDepthwiseFp16CPUKernel *>(cdata);
auto ret = deconv_dw_fp16->Execute(task_id);
if (ret != RET_OK) {

@@ -137,7 +137,7 @@ void DeConvolutionFp16CPUKernel::FreeRunBuf() {
return;
}

int DeConvFp16Run(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int DeConvFp16Run(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto deconv = reinterpret_cast<DeConvolutionFp16CPUKernel *>(cdata);
auto error_code = deconv->DoDeconv(task_id);
if (error_code != RET_OK) {

@@ -89,7 +89,7 @@ int PoolingFp16CPUKernel::RunImpl(int task_id) {
return RET_OK;
}

int PoolingFp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int PoolingFp16Impl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto pooling = reinterpret_cast<PoolingFp16CPUKernel *>(cdata);
auto error_code = pooling->RunImpl(task_id);
if (error_code != RET_OK) {

@@ -49,7 +49,7 @@ int ReduceFp16CPUKernel::Init() {
MS_LOG(ERROR) << "Reduce fp16 only support ReduceMode_ReduceMean";
return RET_ERROR;
}
reducer_ = ReduceMean;
reducer_ = ReduceMeanFp16;

if (!InferShapeDone()) {
return RET_OK;

@@ -67,7 +67,7 @@ int ReduceFp16CPUKernel::CallReduceUnit(int task_id) {
return ret;
}

int ReduceImpl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int ReduceImpl(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto reduce = reinterpret_cast<ReduceFp16CPUKernel *>(cdata);
auto error_code = reduce->CallReduceUnit(task_id);
if (error_code != RET_OK) {

@@ -63,7 +63,7 @@ int SplitFp16CPUKernel::Split(int task_id) {
return RET_OK;
}

int SplitRun(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int SplitRun(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto g_kernel = reinterpret_cast<SplitFp16CPUKernel *>(cdata);
auto ret = g_kernel->Split(task_id);
if (ret != RET_OK) {

@@ -121,7 +121,7 @@ int TransposeFp16CPUKernel::TransposeParallel(int task_id) {
return RET_OK;
}

int TransposeRun(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
static int TransposeRun(int task_id, LiteParallelGroupEnv *penv, void *cdata) {
auto g_kernel = reinterpret_cast<TransposeFp16CPUKernel *>(cdata);
auto ret = g_kernel->TransposeParallel(task_id);
if (ret != RET_OK) {

@@ -42,7 +42,7 @@ int UnstackCPUKernel::ReSize() {
if (para->axis_ < 0) {
para->axis_ += shape_size;
}
for (size_t i = 0; i < static_cast<size_t>(shape_size); i++) {
for (size_t i = 0; i < shape_size; i++) {
if (static_cast<int>(i) < para->axis_) {
para->pre_dims_ *= input->DimensionSize(i);
} else if (static_cast<int>(i) > para->axis_) {

@@ -8,8 +8,8 @@ include(${CMAKE_CURRENT_SOURCE_DIR}/../../../cmake/dependency_gtest.cmake)

string(REPLACE " -Werror " " " CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
string(REPLACE " -Werror " " " CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
STRING(REPLACE " fvisibility=hidden " " -fvisibility=default " CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
STRING(REPLACE " fvisibility=hidden " " -fvisibility=default " CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
STRING(REPLACE " -fvisibility=hidden " " -fvisibility=default " CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
STRING(REPLACE " -fvisibility=hidden " " -fvisibility=default " CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
### anf src
set(ANF_SRC
${CMAKE_CURRENT_SOURCE_DIR}/../../core/ir/meta_tensor.cc

@@ -9,8 +9,8 @@ function Run_x86() {
continue
fi
echo ${model_name}
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-convert-ubuntu'
cd ${convertor_path}/mindspore-lite-${version}-convert-ubuntu || return 1
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86}
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1
if [ $? = 0 ]; then

@@ -30,8 +30,8 @@ function Run_x86() {
continue
fi
echo ${model_name}
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-convert-ubuntu'
cd ${convertor_path}/mindspore-lite-${version}-convert-ubuntu || return 1
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86}
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1
if [ $? = 0 ]; then

@@ -51,8 +51,8 @@ function Run_x86() {
continue
fi
echo ${model_name}
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-convert-ubuntu'
cd ${convertor_path}/mindspore-lite-${version}-convert-ubuntu || return 1
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86}
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1
if [ $? = 0 ]; then

@@ -72,8 +72,8 @@ function Run_x86() {
continue
fi
echo ${model_name}
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-convert-ubuntu'
cd ${convertor_path}/mindspore-lite-${version}-convert-ubuntu || return 1
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86}
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'_posttraining.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/quantTraining/mnist_calibration_data/00099.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'_posttraining.ms.out --warmUpLoopCount=1 --loopCount=1' || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}_posttraining.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/quantTraining/mnist_calibration_data/00099.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}_posttraining.ms.out --warmUpLoopCount=1 --loopCount=1
if [ $? = 0 ]; then

@@ -93,8 +93,8 @@ function Run_x86() {
continue
fi
echo ${model_name}
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-convert-ubuntu'
cd ${convertor_path}/mindspore-lite-${version}-convert-ubuntu || return 1
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86}
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1 --numThreads=1' || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 --numThreads=1
if [ $? = 0 ]; then

@@ -114,8 +114,8 @@ function Run_x86() {
continue
fi
echo ${model_name}
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-convert-ubuntu'
cd ${convertor_path}/mindspore-lite-${version}-convert-ubuntu || return 1
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86}
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' || return 1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 --accuracyThreshold=1.5
if [ $? = 0 ]; then

@@ -323,15 +323,23 @@ file_name=$(ls ${arm_path}/*runtime-arm64*.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
version=${file_name_array[2]}
IFS="." read -r -a suffix <<< "${file_name_array[-1]}"
process_unit=${suffix[0]}
process_unit_arm=${suffix[0]}

file_name=$(ls ${convertor_path}/*runtime-x86*.tar.gz)
IFS="-" read -r -a file_name_array <<< "$file_name"
IFS="." read -r -a suffix <<< "${file_name_array[-1]}"
process_unit_x86=${suffix[0]}

# Unzip arm
cd ${arm_path} || exit 1
mkdir mindspore-lite-${version}-runtime-arm64-${process_unit}
tar -zxf mindspore-lite-${version}-runtime-arm64-${process_unit}.tar.gz -C mindspore-lite-${version}-runtime-arm64-${process_unit} --strip-components 1 || exit 1
mkdir mindspore-lite-${version}-runtime-arm64-${process_unit_arm}
tar -zxf mindspore-lite-${version}-runtime-arm64-${process_unit_arm}.tar.gz -C mindspore-lite-${version}-runtime-arm64-${process_unit_arm} --strip-components 1 || exit 1

# Unzip convertor
# Unzip x86 runtime and convertor
cd ${convertor_path} || exit 1
mkdir mindspore-lite-${version}-runtime-x86-${process_unit_x86}
tar -zxf mindspore-lite-${version}-runtime-x86-${process_unit_x86}.tar.gz -C mindspore-lite-${version}-runtime-x86-${process_unit_x86} --strip-components 1 || exit 1

mkdir mindspore-lite-${version}-convert-ubuntu
tar -zxf mindspore-lite-${version}-convert-ubuntu.tar.gz -C mindspore-lite-${version}-convert-ubuntu --strip-components 1 || exit 1
cd ${convertor_path}/mindspore-lite-${version}-convert-ubuntu || exit 1

@@ -439,9 +447,9 @@ rm -rf ${basepath}/benchmark_test
mkdir -p ${basepath}/benchmark_test
benchmark_test_path=${basepath}/benchmark_test
cd ${benchmark_test_path} || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit}/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit}/lib/liboptimize.so ${benchmark_test_path}/liboptimize.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit}/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/lib/liboptimize.so ${benchmark_test_path}/liboptimize.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1

# Copy the MindSpore models:
cp -a ${ms_models_path}/*.ms ${benchmark_test_path} || exit 1

@@ -15,6 +15,10 @@ if (PLATFORM_ARM32 OR PLATFORM_ARM64)
else()
target_link_libraries(benchmark mindspore-lite pthread)
endif()

install(TARGETS benchmark
RUNTIME DESTINATION ${MAIN_DIR}/benchmark COMPONENT ${COMPONENT_NAME})
if (PLATFORM_ARM32 OR PLATFORM_ARM64)
install(TARGETS benchmark
RUNTIME DESTINATION ${MAIN_DIR}/benchmark COMPONENT ${COMPONENT_NAME})
else()
install(TARGETS benchmark
RUNTIME DESTINATION ${MAIN_DIR}/benchmark COMPONENT ${RUN_X86_COMPONENT_NAME})
endif()

@@ -185,7 +185,7 @@ target_link_libraries(converter_lite PRIVATE
mindspore::eigen
)

if (NOT PLATFORM_arm64 AND NOT PLATFORM_arm32)
if (NOT PLATFORM_ARM64 AND NOT PLATFORM_ARM32)
install(TARGETS converter_lite
RUNTIME DESTINATION ${MAIN_DIR}/converter COMPONENT ${COMPONENT_NAME})
endif ()

@@ -373,7 +373,7 @@ STATUS QuantFilter(ParamValueLitePtr weight, std::shared_ptr<PrimitiveC> primiti
float min = FLT_MAX;
float max = -FLT_MAX;
// find min and max
for (uint32_t j = 0; j < one_filter_size; j++) {
for (size_t j = 0; j < one_filter_size; j++) {
auto index = j + i * one_filter_size;
if (index >= elem_count) {
MS_LOG(ERROR) << "over flow!";