fix ci bug

yeyunpeng 2021-01-21 19:58:35 +08:00
parent fd6dc1b060
commit 944500d89f
6 changed files with 12 additions and 34 deletions

View File

@@ -10,9 +10,6 @@ set(RUNTIME_INC_DIR ${RUNTIME_PKG_NAME}/include)
set(CONVERTER_LIB_DIR ${CONVERTER_PKG_NAME}/lib)
set(TURBO_DIR ${RUNTIME_PKG_NAME}/minddata/third_party/libjpeg-turbo)
set(OPENCV_DIR ${RUNTIME_PKG_NAME}/minddata/third_party/opencv)
set(PROTOBF_DIR ${RUNTIME_PKG_NAME}/third_party/protobuf)
set(FLATBF_DIR ${RUNTIME_PKG_NAME}/third_party/flatbuffers)
set(MIND_DATA_INC_DIR ${RUNTIME_PKG_NAME}/minddata/include)
set(MIND_DATA_LIB_DIR ${RUNTIME_PKG_NAME}/minddata/lib)
@@ -112,11 +109,6 @@ if(PLATFORM_ARM64)
 COMPONENT ${RUNTIME_COMPONENT_NAME})
 install(FILES ${TOP_DIR}/mindspore/core/ir/dtype/type_id.h DESTINATION ${RUNTIME_INC_DIR}/ir/dtype
 COMPONENT ${RUNTIME_COMPONENT_NAME})
-install(FILES ${TOP_DIR}/mindspore/lite/build/schema/model_generated.h DESTINATION ${RUNTIME_INC_DIR}/schema
-COMPONENT ${RUNTIME_COMPONENT_NAME})
-install(FILES ${TOP_DIR}/mindspore/lite/build/schema/ops_generated.h DESTINATION ${RUNTIME_INC_DIR}/schema
-COMPONENT ${RUNTIME_COMPONENT_NAME})
-install(DIRECTORY ${flatbuffers_INC} DESTINATION ${FLATBF_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
 if(ENABLE_TOOLS)
 install(TARGETS benchmark RUNTIME DESTINATION ${RUNTIME_PKG_NAME}/benchmark COMPONENT ${RUNTIME_COMPONENT_NAME})
 endif()
@@ -136,11 +128,6 @@ elseif(PLATFORM_ARM32)
 COMPONENT ${RUNTIME_COMPONENT_NAME})
 install(FILES ${TOP_DIR}/mindspore/core/ir/dtype/type_id.h DESTINATION ${RUNTIME_INC_DIR}/ir/dtype
 COMPONENT ${RUNTIME_COMPONENT_NAME})
-install(FILES ${TOP_DIR}/mindspore/lite/build/schema/model_generated.h DESTINATION ${RUNTIME_INC_DIR}/schema
-COMPONENT ${RUNTIME_COMPONENT_NAME})
-install(FILES ${TOP_DIR}/mindspore/lite/build/schema/ops_generated.h DESTINATION ${RUNTIME_INC_DIR}/schema
-COMPONENT ${RUNTIME_COMPONENT_NAME})
-install(DIRECTORY ${flatbuffers_INC} DESTINATION ${FLATBF_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
 if(ENABLE_TOOLS)
 install(TARGETS benchmark RUNTIME DESTINATION ${RUNTIME_PKG_NAME}/benchmark COMPONENT ${RUNTIME_COMPONENT_NAME})
 endif()
@@ -173,10 +160,6 @@ elseif(WIN32)
 install(DIRECTORY ${TOP_DIR}/mindspore/lite/include/ DESTINATION ${RUNTIME_INC_DIR}
 COMPONENT ${RUNTIME_COMPONENT_NAME} FILES_MATCHING PATTERN "*.h" PATTERN "train*" EXCLUDE)
 endif()
-install(FILES ${TOP_DIR}/build/mindspore/schema/model_generated.h DESTINATION ${RUNTIME_INC_DIR}/schema
-COMPONENT ${RUNTIME_COMPONENT_NAME})
-install(FILES ${TOP_DIR}/build/mindspore/schema/ops_generated.h DESTINATION ${RUNTIME_INC_DIR}/schema
-COMPONENT ${RUNTIME_COMPONENT_NAME})
 install(FILES ${TOP_DIR}/mindspore/core/ir/dtype/type_id.h DESTINATION ${RUNTIME_INC_DIR}/ir/dtype
 COMPONENT ${RUNTIME_COMPONENT_NAME})
 set(WIN_LIB_DIR_RUN_X86 ${RUNTIME_PKG_NAME}/benchmark)
@@ -197,13 +180,8 @@ else()
 install(DIRECTORY ${TOP_DIR}/mindspore/lite/include/ DESTINATION ${RUNTIME_INC_DIR}
 COMPONENT ${RUNTIME_COMPONENT_NAME} FILES_MATCHING PATTERN "*.h" PATTERN "train*" EXCLUDE)
 endif()
-install(FILES ${TOP_DIR}/mindspore/lite/build/schema/model_generated.h DESTINATION ${RUNTIME_INC_DIR}/schema
-COMPONENT ${RUNTIME_COMPONENT_NAME})
-install(FILES ${TOP_DIR}/mindspore/lite/build/schema/ops_generated.h DESTINATION ${RUNTIME_INC_DIR}/schema
-COMPONENT ${RUNTIME_COMPONENT_NAME})
 install(FILES ${TOP_DIR}/mindspore/core/ir/dtype/type_id.h DESTINATION ${RUNTIME_INC_DIR}/ir/dtype
 COMPONENT ${RUNTIME_COMPONENT_NAME})
-install(DIRECTORY ${flatbuffers_INC} DESTINATION ${FLATBF_DIR} COMPONENT ${RUNTIME_COMPONENT_NAME})
 install(FILES ${TOP_DIR}/mindspore/lite/build/src/libmindspore-lite.so DESTINATION ${RUNTIME_LIB_DIR}
 COMPONENT ${RUNTIME_COMPONENT_NAME})
 install(FILES ${TOP_DIR}/mindspore/lite/build/src/libmindspore-lite.a DESTINATION ${RUNTIME_LIB_DIR}

View File

@@ -32,9 +32,6 @@ endif()
set(TOP_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../../../../../../..)
set(LITE_DIR ${TOP_DIR}/mindspore/lite)
include(${TOP_DIR}/cmake/utils.cmake)
include(${TOP_DIR}/cmake/external_libs/flatbuffers.cmake)
include_directories(${CMAKE_CURRENT_SOURCE_DIR})
include_directories(${LITE_DIR}) ## lite include
include_directories(${TOP_DIR}/mindspore/core/) ## core include

View File

@@ -36,6 +36,7 @@ void Concat(void **input, int input_num, int axis, int **inputs_output_shape, si
 int offset = UP_DIV(input_stride, thread_num);
 int count = input_stride - offset * task_id;
 if (count <= 0) {
+axis_offset += inputs_output_shape[i][axis];
 continue;
 }
 count = MSMIN(offset, count);
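
The added line above fixes the concat offset bookkeeping: when count <= 0, this thread copies nothing from input i, but the running offset along the concat axis must still advance by that input's extent, otherwise every later input lands at the wrong place in the output. A minimal standalone sketch of that pattern (hypothetical and simplified to a float concat along the leading axis, with UP_DIV and MSMIN written out inline; not the actual NNACL kernel):

#include <cstddef>
#include <cstring>
#include <vector>

// Row-parallel concat along axis 0: thread `task_id` copies only its slice of
// each input, but the output offset advances by the full input extent every time.
void ConcatAxis0(const std::vector<const float *> &inputs, const std::vector<int> &rows,
                 int row_len, int task_id, int thread_num, float *output) {
  int axis_offset = 0;  // output rows already accounted for by previous inputs
  for (size_t i = 0; i < inputs.size(); ++i) {
    int input_stride = rows[i];
    int offset = (input_stride + thread_num - 1) / thread_num;  // UP_DIV
    int count = input_stride - offset * task_id;                // rows left for this thread
    if (count <= 0) {
      axis_offset += rows[i];  // the fix: skip the copy, but keep the offset in sync
      continue;
    }
    count = count < offset ? count : offset;  // MSMIN
    std::memcpy(output + (axis_offset + offset * task_id) * row_len,
                inputs[i] + offset * task_id * row_len,
                static_cast<size_t>(count) * row_len * sizeof(float));
    axis_offset += rows[i];
  }
}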

View File

@@ -22,7 +22,9 @@ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv8.2-a+dotprod+fp16")
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=armv8.2-a+dotprod+fp16")
 add_library(nnacl_optimize_mid OBJECT ${SDOT_FILES})
 add_dependencies(nnacl_optimize_mid fbs_src)
 if (ENABLE_FP16)
+if(ENABLE_FP16)
 add_library(nnacl_fp16_mid OBJECT ${FP16_FILES})
 endif ()
 add_dependencies(nnacl_fp16_mid fbs_src)
+endif()

View File

@@ -148,10 +148,10 @@ int LiteKernel::Run(const KernelCallBack &before, const KernelCallBack &after) {
 // Support ZeroShape
 size_t zero_shape_num = 0;
 for (auto tensor : this->out_tensors_) {
-for (auto dim : tensor->shape()) {
-if (dim == 0) {
+for (size_t i = 0; i < tensor->shape().size(); i++) {
+if (tensor->shape()[i] == 0) {
 zero_shape_num++;
-continue;
+break;
 }
 }
 }
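
The hunk above replaces continue with break in the zero-shape count: with continue the inner loop keeps scanning the remaining dimensions, so an output tensor whose shape contains more than one zero (for example {0, 0, 3}) is counted once per zero dimension; break counts each tensor at most once. A small standalone sketch of the intended counting (hypothetical helper, assuming shapes are plain std::vector<int> as in the surrounding code):

#include <cstddef>
#include <vector>

// Count tensors that have at least one zero dimension, counting each tensor once.
size_t CountZeroShapeTensors(const std::vector<std::vector<int>> &shapes) {
  size_t zero_shape_num = 0;
  for (const auto &shape : shapes) {
    for (size_t i = 0; i < shape.size(); i++) {
      if (shape[i] == 0) {
        zero_shape_num++;
        break;  // a `continue` here would count a {0, 0, 3} shape twice
      }
    }
  }
  return zero_shape_num;
}
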
@@ -165,7 +165,7 @@ int LiteKernel::Run(const KernelCallBack &before, const KernelCallBack &after) {
 if (after != nullptr) {
 if (!after(TensorVectorCast(this->in_tensors_), TensorVectorCast(this->out_tensors_),
 {this->name_, this->type_str()})) {
-MS_LOG(ERROR) << "run kernel after_callback failed, name: " << this->name_;
+MS_LOG(WARNING) << "run kernel after_callback failed, name: " << this->name_;
 }
 }
 return RET_OK;

View File

@@ -126,10 +126,10 @@ int GraphDefTransform::Transform(const converter::Flags &ctx) {
 }
 formatTransPass->SetQuantType(ctx.quantType);
 formatTransPass->SetFmk(ctx.fmk);
-formatTransOptimizer.AddPass(formatTransPass);
-formatTransOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes));
-formatTransOptimizer.AddPass(new (std::nothrow) TopologicalSortPass());
 if (ctx.fmk != converter::FmkType_TF) {
+formatTransOptimizer.AddPass(formatTransPass);
+formatTransOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes));
+formatTransOptimizer.AddPass(new (std::nothrow) TopologicalSortPass());
 formatTransOptimizer.AddPass(new (std::nothrow) InferShapePass());
 }
 status = formatTransOptimizer.Run(graphDefT);