forked from mindspore-Ecosystem/mindspore
commit 0e2d1d9749 (parent 2d7e4adf44): fix package
@@ -255,9 +255,9 @@ build_lite() {
     else
-      make -j$THREAD_NUM && make install
-      cp -r ${BASEPATH}/output/tmp/mindspore*/runtime ${BASEPATH}/mindspore/lite/tools/benchmark
-      cmake ${LITE_CMAKE_ARGS} -DBUILD_FIRST=off --target benchmark "${BASEPATH}/mindspore/lite"
 
+      make -j$THREAD_NUM && make install && make package
+      cmake ${LITE_CMAKE_ARGS} -DBUILD_FIRST=off "${BASEPATH}/mindspore/lite"
+      cmake --build "${BASEPATH}/mindspore/lite/build" --target benchmark -j$THREAD_NUM
       make install && make package
       if [[ "${local_lite_platform}" == "x86_64" ]]; then
         if [ "${JAVA_HOME}" ]; then
           echo -e "\e[31mJAVA_HOME=$JAVA_HOME \e[0m"
@@ -639,16 +639,16 @@ int SvpSysInit() {
   HI_S32 ret = HI_SUCCESS;
   VB_CONFIG_S struVbConf;
   ret = HI_MPI_SYS_Exit();
-  if (HI_SUCCESS != ret) {
+  if (ret != HI_SUCCESS) {
     MS_LOG(ERROR) << "HI_MPI_SYS_Exit failed!";
     return RET_ERROR;
   }
 
   ret = HI_MPI_VB_Exit();
-  if (HI_SUCCESS != ret) {
+  if (ret != HI_SUCCESS) {
     MS_LOG(WARNING) << "HI_MPI_VB_Exit failed!";
     ret = HI_MPI_SYS_Init();
-    if (HI_SUCCESS != ret) {
+    if (ret != HI_SUCCESS) {
       MS_LOG(ERROR) << "Error:HI_MPI_SYS_Init failed!";
       return RET_ERROR;
     }
@@ -661,19 +661,19 @@ int SvpSysInit() {
   struVbConf.astCommPool[1].u32BlkCnt = 1;
 
   ret = HI_MPI_VB_SetConfig((const VB_CONFIG_S *)&struVbConf);
-  if (HI_SUCCESS != ret) {
+  if (ret != HI_SUCCESS) {
     MS_LOG(ERROR) << "Error:HI_MPI_VB_SetConf failed!";
     return RET_ERROR;
   }
 
   ret = HI_MPI_VB_Init();
-  if (HI_SUCCESS != ret) {
+  if (ret != HI_SUCCESS) {
     MS_LOG(ERROR) << "Error:HI_MPI_VB_Init failed!";
     return RET_ERROR;
   }
 
   ret = HI_MPI_SYS_Init();
-  if (HI_SUCCESS != ret) {
+  if (ret != HI_SUCCESS) {
     MS_LOG(ERROR) << "Error:HI_MPI_SYS_Init failed!";
     return RET_ERROR;
   }
@@ -685,13 +685,13 @@ int SvpSysExit() {
   HI_S32 ret = HI_SUCCESS;
 
   ret = HI_MPI_SYS_Exit();
-  if (HI_SUCCESS != ret) {
+  if (ret != HI_SUCCESS) {
     MS_LOG(ERROR) << "HI_MPI_SYS_Exit failed!";
     return RET_ERROR;
   }
 
   ret = HI_MPI_VB_Exit();
-  if (HI_SUCCESS != ret) {
+  if (ret != HI_SUCCESS) {
     MS_LOG(WARNING) << "HI_MPI_VB_Exit failed!";
     return RET_OK;
   }
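Note: the three SvpSysInit/SvpSysExit hunks above only change the comparison style (`HI_SUCCESS != ret` becomes `ret != HI_SUCCESS`); the control flow is unchanged: tear the MPP system down, reconfigure the video buffer (VB) pool, then bring it back up. A minimal sketch of that reset-then-reinit sequence, with the HiSilicon MPI calls replaced by hypothetical stand-ins so it compiles without the SDK:

#include <iostream>

// Hypothetical stand-ins for the HI_MPI_* calls, so this sketch builds without the SDK.
constexpr int HI_SUCCESS = 0;
constexpr int RET_OK = 0;
constexpr int RET_ERROR = -1;
int SysExit() { return HI_SUCCESS; }      // stands in for HI_MPI_SYS_Exit
int VbExit() { return HI_SUCCESS; }       // stands in for HI_MPI_VB_Exit
int VbSetConfig() { return HI_SUCCESS; }  // stands in for HI_MPI_VB_SetConfig
int VbInit() { return HI_SUCCESS; }       // stands in for HI_MPI_VB_Init
int SysInit() { return HI_SUCCESS; }      // stands in for HI_MPI_SYS_Init

// Same ordering as SvpSysInit: exit first, then configure the VB pool, then init.
int SvpSysInitSketch() {
  if (SysExit() != HI_SUCCESS) return RET_ERROR;  // hard failure
  if (VbExit() != HI_SUCCESS) {                   // soft failure: warn, restore system, continue
    std::cerr << "VB exit failed, re-initialising system\n";
    if (SysInit() != HI_SUCCESS) return RET_ERROR;
  }
  if (VbSetConfig() != HI_SUCCESS) return RET_ERROR;
  if (VbInit() != HI_SUCCESS) return RET_ERROR;
  if (SysInit() != HI_SUCCESS) return RET_ERROR;
  return RET_OK;
}

int main() { return SvpSysInitSketch() == RET_OK ? 0 : 1; }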
@@ -45,7 +45,7 @@ int CustomCPUKernel::Prepare() {
 
   if (nnie::NNIEManager::GetInstance()->Init(reinterpret_cast<char *>(inputs_[inputs_.size() - 1].MutableData()),
                                              static_cast<int>(inputs_[inputs_.size() - 1].ElementNum()), inputs_)) {
-    // LOGW("Load WK Model Fail");
+    LOGI("Load WK Model Fail");
     return RET_OK;
   }
   load_model_ = true;
@@ -73,12 +73,12 @@ int CustomCPUKernel::Execute() {
   }
   run_seg_ = seg_id_;
 
-  if (nnie::NNIEManager::GetInstance()->FillData(&inputs_, run_seg_)) {
+  if (nnie::NNIEManager::GetInstance()->FillData(&inputs_, run_seg_) != RET_OK) {
     LOGE("Fail Fill Data");
     return RET_ERROR;
   }
 
-  if (nnie::NNIEManager::GetInstance()->Run(&outputs_, run_seg_, outputs_shapes_)) {
+  if (nnie::NNIEManager::GetInstance()->Run(&outputs_, run_seg_, outputs_shapes_) != RET_OK) {
     LOGE("Fail WK Run");
     return RET_ERROR;
   }
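Note: the Execute() change makes the status checks explicit. FillData and Run report MindSpore Lite status codes, and comparing against RET_OK states the intent instead of relying on the OK value being zero. A small self-contained sketch of the convention, with hypothetical status constants and a stand-in FillData:

#include <cstdio>

// Hypothetical status codes mirroring the RET_OK / RET_ERROR convention.
constexpr int RET_OK = 0;
constexpr int RET_ERROR = -1;

// Stand-in for a call such as NNIEManager::FillData that reports status via its return value.
int FillData(bool succeed) { return succeed ? RET_OK : RET_ERROR; }

int Execute(bool succeed) {
  // Explicit comparison: `if (FillData(...))` would silently depend on RET_OK being 0,
  // while `!= RET_OK` reads as "anything other than success is an error".
  if (FillData(succeed) != RET_OK) {
    std::fprintf(stderr, "Fail Fill Data\n");
    return RET_ERROR;
  }
  return RET_OK;
}

int main() { return Execute(true) == RET_OK ? 0 : 1; }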
@@ -70,9 +70,6 @@ int GetCustomShape(const mindspore::schema::Custom *op, const std::string &attr,
   char *save_ptr = nullptr;
   res = strtok_r(buf, delims, &save_ptr);
   while (res != nullptr) {
-    // 待补完 (to be completed)
-    // outputs[id]->format_ = input->format_;
-    // outputs[id]->data_type_ = kNumberTypeFloat32;
     int64_t ndims = strtol(res, &res, kDecimal);
     int j = 0;
     std::vector<int64_t> shape;
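For context on the surrounding code: GetCustomShape walks a delimited shape attribute with strtok_r, reading a leading dimension count and then each dimension with strtol. A simplified, self-contained sketch of that parsing pattern; the attribute format shown ("rank,d1,...,dn" tokens separated by ':') is an assumption for illustration, not taken from the source:

#include <cstdlib>
#include <cstring>
#include <iostream>
#include <vector>

constexpr int kDecimal = 10;

// Parse a string such as "4,1,3,224,224:2,8,8" into one shape per ':'-separated token,
// where the first number of each token is the rank (assumed format).
std::vector<std::vector<int64_t>> ParseShapes(const char *attr) {
  std::vector<std::vector<int64_t>> shapes;
  std::vector<char> buf(attr, attr + std::strlen(attr) + 1);  // strtok_r needs a writable copy
  char *save_ptr = nullptr;
  char *res = strtok_r(buf.data(), ":", &save_ptr);
  while (res != nullptr) {
    int64_t ndims = std::strtol(res, &res, kDecimal);  // leading rank
    std::vector<int64_t> shape;
    for (int64_t j = 0; j < ndims; ++j) {
      if (*res == ',') ++res;  // skip the delimiter
      shape.push_back(std::strtol(res, &res, kDecimal));
    }
    shapes.push_back(shape);
    res = strtok_r(nullptr, ":", &save_ptr);
  }
  return shapes;
}

int main() {
  for (const auto &shape : ParseShapes("4,1,3,224,224:2,8,8")) {
    for (int64_t d : shape) std::cout << d << ' ';
    std::cout << '\n';
  }
  return 0;
}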
@@ -791,8 +791,8 @@ int NnieCommCreate(NnieRunCfg *nnie_run_cfg, char *model_buf, int size,
   j = GetFillIndex(inputs, inputs.size() - 1, model->model_.astSeg[0].astSrcNode[0].szName);
   if (j == (inputs.size() - 1)) {
     j = 0;
-    // LOGW("input tensor name(%s) can't match wk node name(%s).", inputs[0].Name().c_str(),
-    // model->model_.astSeg[0].astSrcNode[0].szName);
+    LOGI("input tensor name(%s) can't match wk node name(%s).", inputs[0].Name().c_str(),
+         model->model_.astSeg[0].astSrcNode[0].szName);
   }
   if (CheckMsShapeN(nnie_run_cfg, inputs[j].Shape(), model->model_.astSeg[0].astSrcNode[0]) != RET_OK) {
     return RET_ERROR;
@@ -107,8 +107,8 @@ int NNIEManager::GetOutputData(std::vector<mindspore::MSTensor> *outputs,
   j = GetFillIndex(*outputs, output_size, nnie_cfg_.param_.model_->astSeg[seg_idx].astDstNode[i].szName);
   if (j == output_size) {
     j = i;
-    // LOGW("output tensor name(%s) can't match wk node name(%s).", (*outputs)[j].Name().c_str(),
-    // nnie_cfg_.param_.model_->astSeg[seg_idx].astDstNode[i].szName);
+    LOGI("output tensor name(%s) can't match wk node name(%s).", (*outputs)[j].Name().c_str(),
+         nnie_cfg_.param_.model_->astSeg[seg_idx].astDstNode[i].szName);
   }
 
   auto input_data_type = (*outputs)[j].DataType();
@@ -196,8 +196,8 @@ int NNIEManager::FillData(std::vector<mindspore::MSTensor> *inputs, unsigned int
       continue;
     } else {
       j = i;
-      // LOGW("input tensor name(%s) can't match wk node name(%s).", (*inputs)[i].Name().c_str(),
-      // nnie_cfg_.param_.model_->astSeg[seg_id].astSrcNode[i].szName);
+      LOGI("input tensor name(%s) can't match wk node name(%s).", (*inputs)[i].Name().c_str(),
+           nnie_cfg_.param_.model_->astSeg[seg_id].astSrcNode[i].szName);
     }
   }
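The three name-matching hunks above share one fallback: GetFillIndex looks up the tensor whose name equals the WK node name, returns the search size when nothing matches, and the code then falls back to positional matching, now logging the mismatch via LOGI rather than a commented-out LOGW. A simplified sketch of that lookup-with-fallback, using a hypothetical FindByName helper over plain strings rather than the real MSTensor/GetFillIndex API:

#include <cstdio>
#include <string>
#include <vector>

// Hypothetical stand-in for GetFillIndex: return the index of the tensor whose
// name matches `node_name`, or `size` when there is no match.
size_t FindByName(const std::vector<std::string> &names, size_t size, const std::string &node_name) {
  for (size_t k = 0; k < size; ++k) {
    if (names[k] == node_name) return k;
  }
  return size;
}

int main() {
  std::vector<std::string> output_names = {"det_boxes", "det_scores"};
  const std::string wk_node_name = "output0";  // no tensor carries this name
  size_t i = 0;                                // positional index of the WK node being filled

  size_t j = FindByName(output_names, output_names.size(), wk_node_name);
  if (j == output_names.size()) {
    // Fallback mirrors the patch: use the positional index and log the mismatch.
    j = i;
    std::fprintf(stderr, "output tensor name(%s) can't match wk node name(%s).\n",
                 output_names[j].c_str(), wk_node_name.c_str());
  }
  return static_cast<int>(j);
}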
@@ -37,6 +37,14 @@
   } \
   } while (0)
 
+#define LOGI(format, ...) \
+  do { \
+    if (0) { \
+      fprintf(stderr, "\n[Warning] " LOG_TAG1 " [" __FILE__ ":%d] %s] ", __LINE__, __FUNCTION__); \
+      fprintf(stderr, format, ##__VA_ARGS__); \
+    } \
+  } while (0)
+
 constexpr int kMaxSize = 1024;
 constexpr int kDecimal = 10;
 
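The newly added LOGI macro is deliberately a no-op: its body is guarded by `if (0)`, so nothing is printed at run time, but the format string and arguments are still compiled and format-checked, which is why the patch can switch the commented-out LOGW calls to LOGI without adding log noise. A self-contained sketch of the same pattern; LOG_TAG1 is defined here only for the example:

#include <cstdio>

#define LOG_TAG1 "NNIE"  // assumed value, only for this sketch

// Same shape as the macro added in the patch: guarded by if (0), so it never prints,
// yet the compiler still checks the format string against its arguments.
#define LOGI(format, ...) \
  do { \
    if (0) { \
      fprintf(stderr, "\n[Warning] " LOG_TAG1 " [" __FILE__ ":%d] %s] ", __LINE__, __FUNCTION__); \
      fprintf(stderr, format, ##__VA_ARGS__); \
    } \
  } while (0)

int main() {
  const char *tensor_name = "input0";
  const char *node_name = "data";
  LOGI("input tensor name(%s) can't match wk node name(%s).", tensor_name, node_name);  // silent
  return 0;
}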
@@ -55,10 +55,6 @@ Status ProposalInterface::Infer(std::vector<mindspore::MSTensor> *inputs, std::v
 
   size_t id = 0;
   while (id < outputs->size()) {
-    // 待补完 (to be completed)
-    // outputs[id]->format_ = input->format_;
-    // outputs[id]->data_type_ = kNumberTypeFloat32;
-    // 设置type为int (set type to int)
     std::vector<int64_t> shape{-1, COORDI_NUM};
     (*outputs)[id].SetShape(shape);
     (*outputs)[id].SetDataType(DataType::kNumberTypeFloat32);
@@ -46,8 +46,8 @@ function linux_release_package()
   mkdir -p ${output_path}/release/linux/nnie/
   cp ${input_path}/ubuntu_x86/avx/*.tar.gz* ${output_path}/release/linux/
 
-  cp ${input_path}/linux_aarch32/*.tar.gz* ${output_path}/release/linux/
-  cp ${input_path}/ubuntu_x86/nnie/3516D/*.tar.gz* ${output_path}/release/linux/nnie/
+  cp -r ${input_path}/linux_aarch32/nnie/Hi* ${output_path}/release/linux/nnie/
+  cp ${input_path}/ubuntu_x86/nnie/Hi3516D/*.tar.gz* ${output_path}/release/linux/nnie/
 }
 
 function windows_release_package()