diff --git a/model_zoo/official/cv/unet3d/README.md b/model_zoo/official/cv/unet3d/README.md
index a5cdac62ce7..5de7b472449 100644
--- a/model_zoo/official/cv/unet3d/README.md
+++ b/model_zoo/official/cv/unet3d/README.md
@@ -20,6 +20,10 @@
     - [Evaluation](#evaluation)
         - [Evaluating on Ascend](#training-on-ascend)
         - [Evaluating on GPU](#training-on-gpu)
+    - [Inference Process](#inference-process)
+        - [Export MindIR](#export-mindir)
+        - [Infer on Ascend310](#infer-on-ascend310)
+        - [Result](#result)
 - [Model Description](#model-description)
     - [Performance](#performance)
         - [Evaluation Performance](#evaluation-performance)
@@ -348,6 +352,40 @@
 eval average dice is 0.9502010010453671
 ```
 
+## Inference Process
+
+### [Export MindIR](#contents)
+
+```shell
+python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --file_format [FILE_FORMAT]
+```
+
+The `ckpt_file` parameter is required,
+`file_format` should be in ["AIR", "MINDIR"].
+
+### Infer on Ascend310
+
+Before performing inference, the MINDIR file must be exported by the `export.py` script. We only provide an example of inference using the MINDIR model.
+
+```shell
+# Ascend310 inference
+bash run_infer_310.sh [MINDIR_PATH] [NEED_PREPROCESS] [DEVICE_ID]
+```
+
+- `NEED_PREPROCESS` indicates whether the dataset needs to be preprocessed; its value is 'y' or 'n'.
+- `DEVICE_ID` is optional; the default value is 0.
+
+### Result
+
+The inference result is saved in the current path; you can find results like the following in the acc.log file.
+
+```shell
+
+# grep "eval average dice is" acc.log
+eval average dice is 0.9502010010453671
+
+```
+
 ## [Model Description](#contents)
 
 ### [Performance](#contents)
diff --git a/model_zoo/official/cv/unet3d/ascend310_infer/CMakeLists.txt b/model_zoo/official/cv/unet3d/ascend310_infer/CMakeLists.txt
new file mode 100644
index 00000000000..ee3c8544734
--- /dev/null
+++ b/model_zoo/official/cv/unet3d/ascend310_infer/CMakeLists.txt
@@ -0,0 +1,14 @@
+cmake_minimum_required(VERSION 3.14.1)
+project(Ascend310Infer)
+add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0)
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 -g -std=c++17 -Werror -Wall -fPIE -Wl,--allow-shlib-undefined")
+set(PROJECT_SRC_ROOT ${CMAKE_CURRENT_LIST_DIR}/)
+option(MINDSPORE_PATH "mindspore install path" "")
+include_directories(${MINDSPORE_PATH})
+include_directories(${MINDSPORE_PATH}/include)
+include_directories(${PROJECT_SRC_ROOT})
+find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib)
+file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*)
+
+add_executable(main src/main.cc src/utils.cc)
+target_link_libraries(main ${MS_LIB} ${MD_LIB} gflags)
diff --git a/model_zoo/official/cv/unet3d/ascend310_infer/build.sh b/model_zoo/official/cv/unet3d/ascend310_infer/build.sh
new file mode 100644
index 00000000000..285514e19f2
--- /dev/null
+++ b/model_zoo/official/cv/unet3d/ascend310_infer/build.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+if [ -d out ]; then
+    rm -rf out
+fi
+
+mkdir out
+cd out || exit
+
+if [ -f "Makefile" ]; then
+    make clean
+fi
+
+cmake .. \
+    -DMINDSPORE_PATH="`pip3.7 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`"
+make
diff --git a/model_zoo/official/cv/unet3d/ascend310_infer/inc/utils.h b/model_zoo/official/cv/unet3d/ascend310_infer/inc/utils.h
new file mode 100644
index 00000000000..efebe03a8c1
--- /dev/null
+++ b/model_zoo/official/cv/unet3d/ascend310_infer/inc/utils.h
@@ -0,0 +1,32 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MINDSPORE_INFERENCE_UTILS_H_
+#define MINDSPORE_INFERENCE_UTILS_H_
+
+#include <sys/stat.h>
+#include <dirent.h>
+#include <vector>
+#include <string>
+#include <memory>
+#include "include/api/types.h"
+
+std::vector<std::string> GetAllFiles(std::string_view dirName);
+DIR *OpenDir(std::string_view dirName);
+std::string RealPath(std::string_view path);
+mindspore::MSTensor ReadFileToTensor(const std::string &file);
+int WriteResult(const std::string& imageFile, const std::vector<mindspore::MSTensor> &outputs);
+#endif
diff --git a/model_zoo/official/cv/unet3d/ascend310_infer/src/main.cc b/model_zoo/official/cv/unet3d/ascend310_infer/src/main.cc
new file mode 100644
index 00000000000..dffc0b56262
--- /dev/null
+++ b/model_zoo/official/cv/unet3d/ascend310_infer/src/main.cc
@@ -0,0 +1,129 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <sys/time.h>
+#include <gflags/gflags.h>
+#include <dirent.h>
+#include <iostream>
+#include <string>
+#include <algorithm>
+#include <iosfwd>
+#include <vector>
+#include <fstream>
+#include <sstream>
+
+#include "include/api/model.h"
+#include "include/api/context.h"
+#include "include/api/types.h"
+#include "include/api/serialization.h"
+#include "include/dataset/execute.h"
+#include "include/dataset/vision.h"
+#include "inc/utils.h"
+
+using mindspore::Context;
+using mindspore::Serialization;
+using mindspore::Model;
+using mindspore::Status;
+using mindspore::MSTensor;
+using mindspore::dataset::Execute;
+using mindspore::ModelType;
+using mindspore::GraphCell;
+using mindspore::kSuccess;
+
+DEFINE_string(mindir_path, "", "mindir path");
+DEFINE_string(input0_path, ".", "input0 path");
+DEFINE_int32(device_id, 0, "device id");
+
+int main(int argc, char **argv) {
+  gflags::ParseCommandLineFlags(&argc, &argv, true);
+  if (RealPath(FLAGS_mindir_path).empty()) {
+    std::cout << "Invalid mindir" << std::endl;
+    return 1;
+  }
+
+  auto context = std::make_shared<Context>();
+  auto ascend310 = std::make_shared<mindspore::Ascend310DeviceInfo>();
+  ascend310->SetDeviceID(FLAGS_device_id);
+  context->MutableDeviceInfo().push_back(ascend310);
+  mindspore::Graph graph;
+  Serialization::Load(FLAGS_mindir_path, ModelType::kMindIR, &graph);
+
+  Model model;
+  Status ret = model.Build(GraphCell(graph), context);
+  if (ret != kSuccess) {
+    std::cout << "ERROR: Build failed." << std::endl;
+    return 1;
+  }
+
+  std::vector<MSTensor> model_inputs = model.GetInputs();
+  if (model_inputs.empty()) {
+    std::cout << "Invalid model, inputs is empty." << std::endl;
+    return 1;
+  }
+
+  auto input0_files = GetAllFiles(FLAGS_input0_path);
+  if (input0_files.empty()) {
+    std::cout << "ERROR: input data empty." << std::endl;
+    return 1;
+  }
+
+  std::map<double, double> costTime_map;
+  size_t size = input0_files.size();
+
+  for (size_t i = 0; i < size; ++i) {
+    struct timeval start = {0};
+    struct timeval end = {0};
+    double startTimeMs;
+    double endTimeMs;
+    std::vector<MSTensor> inputs;
+    std::vector<MSTensor> outputs;
+    std::cout << "Start predict input files:" << input0_files[i] << std::endl;
+
+    auto input0 = ReadFileToTensor(input0_files[i]);
+    inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(),
+                        input0.Data().get(), input0.DataSize());
+
+    gettimeofday(&start, nullptr);
+    ret = model.Predict(inputs, &outputs);
+    gettimeofday(&end, nullptr);
+    if (ret != kSuccess) {
+      std::cout << "Predict " << input0_files[i] << " failed." << std::endl;
+      return 1;
+    }
+    startTimeMs = (1.0 * start.tv_sec * 1000000 + start.tv_usec) / 1000;
+    endTimeMs = (1.0 * end.tv_sec * 1000000 + end.tv_usec) / 1000;
+    costTime_map.insert(std::pair<double, double>(startTimeMs, endTimeMs));
+    WriteResult(input0_files[i], outputs);
+  }
+  double average = 0.0;
+  int inferCount = 0;
+
+  for (auto iter = costTime_map.begin(); iter != costTime_map.end(); iter++) {
+    double diff = 0.0;
+    diff = iter->second - iter->first;
+    average += diff;
+    inferCount++;
+  }
+  average = average / inferCount;
+  std::stringstream timeCost;
+  timeCost << "NN inference cost average time: "<< average << " ms of infer_count " << inferCount << std::endl;
+  std::cout << "NN inference cost average time: "<< average << "ms of infer_count " << inferCount << std::endl;
+  std::string fileName = "./time_Result" + std::string("/test_perform_static.txt");
+  std::ofstream fileStream(fileName.c_str(), std::ios::trunc);
+  fileStream << timeCost.str();
+  fileStream.close();
+  costTime_map.clear();
+  return 0;
+}
diff --git a/model_zoo/official/cv/unet3d/ascend310_infer/src/utils.cc b/model_zoo/official/cv/unet3d/ascend310_infer/src/utils.cc
new file mode 100644
index 00000000000..c947e4d5f45
--- /dev/null
+++ b/model_zoo/official/cv/unet3d/ascend310_infer/src/utils.cc
@@ -0,0 +1,129 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <algorithm>
+#include <iostream>
+#include "inc/utils.h"
+
+using mindspore::MSTensor;
+using mindspore::DataType;
+
+std::vector<std::string> GetAllFiles(std::string_view dirName) {
+  struct dirent *filename;
+  DIR *dir = OpenDir(dirName);
+  if (dir == nullptr) {
+    return {};
+  }
+  std::vector<std::string> res;
+  while ((filename = readdir(dir)) != nullptr) {
+    std::string dName = std::string(filename->d_name);
+    if (dName == "." || dName == ".." || filename->d_type != DT_REG) {
+      continue;
+    }
+    res.emplace_back(std::string(dirName) + "/" + filename->d_name);
+  }
+  std::sort(res.begin(), res.end());
+  for (auto &f : res) {
+    std::cout << "image file: " << f << std::endl;
+  }
+  return res;
+}
+
+int WriteResult(const std::string& imageFile, const std::vector<mindspore::MSTensor> &outputs) {
+  std::string homePath = "./result_Files";
+  for (size_t i = 0; i < outputs.size(); ++i) {
+    size_t outputSize;
+    std::shared_ptr<const void> netOutput;
+    netOutput = outputs[i].Data();
+    outputSize = outputs[i].DataSize();
+    int pos = imageFile.rfind('/');
+    std::string fileName(imageFile, pos + 1);
+    fileName.replace(fileName.find('.'), fileName.size() - fileName.find('.'), '_' + std::to_string(i) + ".bin");
+    std::string outFileName = homePath + "/" + fileName;
+    FILE * outputFile = fopen(outFileName.c_str(), "wb");
+    fwrite(netOutput.get(), outputSize, sizeof(char), outputFile);
+    fclose(outputFile);
+    outputFile = nullptr;
+  }
+  return 0;
+}
+
+mindspore::MSTensor ReadFileToTensor(const std::string &file) {
+  if (file.empty()) {
+    std::cout << "Pointer file is nullptr" << std::endl;
+    return mindspore::MSTensor();
+  }
+
+  std::ifstream ifs(file);
+  if (!ifs.good()) {
+    std::cout << "File: " << file << " is not exist" << std::endl;
+    return mindspore::MSTensor();
+  }
+
+  if (!ifs.is_open()) {
+    std::cout << "File: " << file << "open failed" << std::endl;
+    return mindspore::MSTensor();
+  }
+
+  ifs.seekg(0, std::ios::end);
+  size_t size = ifs.tellg();
+  mindspore::MSTensor buffer(file, mindspore::DataType::kNumberTypeUInt8, {static_cast<int64_t>(size)}, nullptr, size);
+
+  ifs.seekg(0, std::ios::beg);
+  ifs.read(reinterpret_cast<char *>(buffer.MutableData()), size);
+  ifs.close();
+
+  return buffer;
+}
+
+
+DIR *OpenDir(std::string_view dirName) {
+  if (dirName.empty()) {
+    std::cout << " dirName is null ! " << std::endl;
+    return nullptr;
+  }
+  std::string realPath = RealPath(dirName);
+  struct stat s;
+  lstat(realPath.c_str(), &s);
+  if (!S_ISDIR(s.st_mode)) {
+    std::cout << "dirName is not a valid directory !" << std::endl;
+    return nullptr;
+  }
+  DIR *dir;
+  dir = opendir(realPath.c_str());
+  if (dir == nullptr) {
+    std::cout << "Can not open dir " << dirName << std::endl;
+    return nullptr;
+  }
+  std::cout << "Successfully opened the dir " << dirName << std::endl;
+  return dir;
+}
+
+std::string RealPath(std::string_view path) {
+  char realPathMem[PATH_MAX] = {0};
+  char *realPathRet = nullptr;
+  realPathRet = realpath(path.data(), realPathMem);
+
+  if (realPathRet == nullptr) {
+    std::cout << "File: " << path << " is not exist.";
+    return "";
+  }
+
+  std::string realPath(realPathMem);
+  std::cout << path << " realpath is: " << realPath << std::endl;
+  return realPath;
+}
diff --git a/model_zoo/official/cv/unet3d/default_config.yaml b/model_zoo/official/cv/unet3d/default_config.yaml
index 9292e77eb8b..5e6c53fb9cd 100644
--- a/model_zoo/official/cv/unet3d/default_config.yaml
+++ b/model_zoo/official/cv/unet3d/default_config.yaml
@@ -13,7 +13,7 @@ output_path: "/cache/train"
 load_path: "/cache/checkpoint_path/"
 device_target: "Ascend"
 checkpoint_path: "./checkpoint/"
-checkpoint_file_path: "Unet3d-9-877.ckpt"
+checkpoint_file_path: "Unet3d-10-110.ckpt"
 
 # ==============================================================================
 # Training options
@@ -35,9 +35,13 @@ lower_limit: 3
 
 # Export options
 device_id: 0
-ckpt_file: ""
-file_name: ""
-file_format: ""
+ckpt_file: "./checkpoint/Unet3d-10-110.ckpt"
+file_name: "unet3d"
+file_format: "MINDIR"
+
+# 310 infer options
+pre_result_path: "./preprocess_Result"
+post_result_path: "./result_Files"
 
 ---
 # Help description for each configuration
diff --git a/model_zoo/official/cv/unet3d/export.py b/model_zoo/official/cv/unet3d/export.py
new file mode 100644
index 00000000000..c54fb4595c6
--- /dev/null
+++ b/model_zoo/official/cv/unet3d/export.py
@@ -0,0 +1,42 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import os
+import numpy as np
+from mindspore import dtype as mstype
+from mindspore import context, Tensor, export
+from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from src.unet3d_model import UNet3d, UNet3d_
+from src.model_utils.config import config
+from src.model_utils.moxing_adapter import moxing_wrapper
+
+device_id = int(os.getenv('DEVICE_ID'))
+context.set_context(mode=context.GRAPH_MODE, device_target=config.device_target, save_graphs=False, device_id=device_id)
+
+@moxing_wrapper()
+def export_model(ckpt_path):
+    if config.device_target == 'Ascend':
+        network = UNet3d()
+    else:
+        network = UNet3d_()
+    network.set_train(False)
+    param_dict = load_checkpoint(ckpt_path)
+    load_param_into_net(network, param_dict)
+    image_shape = [config.batch_size, config.in_channels] + config.roi_size
+    window_image = Tensor(np.zeros(image_shape), mstype.float32)
+    export(network, window_image, file_name=config.file_name, file_format=config.file_format)
+
+if __name__ == '__main__':
+    export_model(ckpt_path=config.ckpt_file)
diff --git a/model_zoo/official/cv/unet3d/postprocess.py b/model_zoo/official/cv/unet3d/postprocess.py
new file mode 100644
index 00000000000..42398e5f906
--- /dev/null
+++ b/model_zoo/official/cv/unet3d/postprocess.py
@@ -0,0 +1,62 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import os
+import numpy as np
+from src.utils import create_sliding_window, CalculateDice
+from src.model_utils.config import config
+from src.model_utils.moxing_adapter import moxing_wrapper
+
+@moxing_wrapper()
+def cal_acc():
+    ''' calculate accuracy'''
+    index = 0
+    total_dice = 0
+    image_path = os.path.join(config.pre_result_path, "image")
+    seg_path = os.path.join(config.pre_result_path, "seg")
+    file_num = len(os.listdir(image_path))
+    for j in range(file_num):
+        image = np.load(os.path.join(image_path, "unet3d_bs" + str(config.batch_size) + "_" + str(j) + ".npy"))
+        seg = np.load(os.path.join(seg_path, "unet3d_bs" + str(config.batch_size) + "_" + str(j) + ".npy"))
+        _, slice_list = create_sliding_window(image, config.roi_size, config.overlap)
+        image_size = (config.batch_size, config.num_classes) + image.shape[2:]
+        output_image = np.zeros(image_size, np.float32)
+        count_map = np.zeros(image_size, np.float32)
+        importance_map = np.ones(config.roi_size, np.float32)
+        i = 0
+        w_name = "unet3d_bs" + str(config.batch_size) + "_bt" + str(j) + "_" + str(i) + "_0.bin"
+        w_path = os.path.join(config.post_result_path, w_name)
+        while os.path.isfile(w_path):
+            pred_shape = (config.batch_size, config.num_classes) + tuple(config.roi_size)
+            pred_probs = np.fromfile(w_path, np.float32).reshape(pred_shape)
+            slice_ = slice_list[i]
+            output_image[slice_] += pred_probs
+            count_map[slice_] += importance_map
+            i += 1
+            w_name = "unet3d_bs" + str(config.batch_size) + "_bt" + str(j) + "_" + str(i) + "_0.bin"
+            w_path = os.path.join(config.post_result_path, w_name)
+
+        output_image = output_image / count_map
+        dice, _ = CalculateDice(output_image, seg)
+        print("The {} batch dice is {}".format(index, dice), flush=True)
+        total_dice += dice
+        index = index + 1
+    eval_data_size = index
+    avg_dice = total_dice / eval_data_size
+    print("**********************End Eval***************************************")
+    print("eval average dice is {}".format(avg_dice))
+
+if __name__ == '__main__':
+    cal_acc()
diff --git a/model_zoo/official/cv/unet3d/preprocess.py b/model_zoo/official/cv/unet3d/preprocess.py
new file mode 100644
index 00000000000..cd48b058468
--- /dev/null
+++ b/model_zoo/official/cv/unet3d/preprocess.py
@@ -0,0 +1,58 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import os
+import numpy as np
+from src.dataset import create_dataset
+from src.utils import create_sliding_window
+from src.model_utils.config import config
+from src.model_utils.moxing_adapter import moxing_wrapper
+
+
+@moxing_wrapper()
+def gen_bin(data_path):
+    ''' generate bin files.'''
+    data_dir = data_path + "/image/"
+    seg_dir = data_path + "/seg/"
+    eval_dataset = create_dataset(data_path=data_dir, seg_path=seg_dir, is_training=False)
+    eval_data_size = eval_dataset.get_dataset_size()
+    print("eval dataset length is:", eval_data_size)
+
+    window_path = os.path.join(config.pre_result_path, "00_data")
+    image_path = os.path.join(config.pre_result_path, "image")
+    seg_path = os.path.join(config.pre_result_path, "seg")
+    os.makedirs(window_path)
+    os.makedirs(image_path)
+    os.makedirs(seg_path)
+    j = 0
+
+    for batch in eval_dataset.create_dict_iterator(num_epochs=1, output_numpy=True):
+        image = batch["image"]
+        seg = batch["seg"]
+        f_name = "unet3d_bs" + str(config.batch_size) + "_" + str(j) + ".npy"
+        np.save(os.path.join(image_path, f_name), image)
+        np.save(os.path.join(seg_path, f_name), seg)
+        sliding_window_list, slice_list = create_sliding_window(image, config.roi_size, config.overlap)
+        i = 0
+        for window, _ in zip(sliding_window_list, slice_list):
+            w_name = "unet3d_bs" + str(config.batch_size) + "_bt" + str(j) + "_" + str(i) + ".bin"
+            window_file_path = os.path.join(window_path, w_name)
+            window.tofile(window_file_path)
+            i += 1
+        j += 1
+    print("=" * 20, "export bin files finished", "=" * 20)
+
+if __name__ == '__main__':
+    gen_bin(data_path=config.data_path)
diff --git a/model_zoo/official/cv/unet3d/scripts/run_infer_310.sh b/model_zoo/official/cv/unet3d/scripts/run_infer_310.sh
new file mode 100644
index 00000000000..713e516b5f5
--- /dev/null
+++ b/model_zoo/official/cv/unet3d/scripts/run_infer_310.sh
@@ -0,0 +1,120 @@
+#!/bin/bash
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+if [[ $# -lt 2 || $# -gt 3 ]]; then
+    echo "Usage: bash run_infer_310.sh [MINDIR_PATH] [NEED_PREPROCESS] [DEVICE_ID]
+    NEED_PREPROCESS means whether preprocessing is needed, its value is 'y' or 'n'.
+    DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero"
+exit 1
+fi
+
+get_real_path(){
+    if [ "${1:0:1}" == "/" ]; then
+        echo "$1"
+    else
+        echo "$(realpath -m $PWD/$1)"
+    fi
+}
+model=$(get_real_path $1)
+
+if [ "$2" == "y" ] || [ "$2" == "n" ];then
+    need_preprocess=$2
+else
+    echo "whether preprocessing is needed or not, its value must be in [y, n]"
+    exit 1
+fi
+
+device_id=0
+if [ $# == 3 ]; then
+    device_id=$3
+fi
+
+echo "mindir name: "$model
+echo "need preprocess: "$need_preprocess
+echo "device id: "$device_id
+
+export ASCEND_HOME=/usr/local/Ascend/
+if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then
+    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
+    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
+    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
+else
+    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
+    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/opp
+fi
+
+function preprocess_data()
+{
+    if [ -d preprocess_Result ]; then
+        rm -rf ./preprocess_Result
+    fi
+    mkdir preprocess_Result
+    python3.7 ../preprocess.py
+}
+
+function compile_app()
+{
+    cd ../ascend310_infer || exit
+    bash build.sh &> build.log
+}
+
+function infer()
+{
+    cd - || exit
+    if [ -d result_Files ]; then
+        rm -rf ./result_Files
+    fi
+    if [ -d time_Result ]; then
+        rm -rf ./time_Result
+    fi
+    mkdir result_Files
+    mkdir time_Result
+
+    ../ascend310_infer/out/main --mindir_path=$model --input0_path=./preprocess_Result/00_data --device_id=$device_id &> infer.log
+
+}
+
+function cal_acc()
+{
+    python3.7 ../postprocess.py &> acc.log
+}
+
+if [ $need_preprocess == "y" ]; then
+    preprocess_data
+    if [ $? -ne 0 ]; then
+        echo "preprocess dataset failed"
+        exit 1
+    fi
+fi
+compile_app
+if [ $? -ne 0 ]; then
+    echo "compile app code failed"
+    exit 1
+fi
+infer
+if [ $? -ne 0 ]; then
+    echo "execute inference failed"
+    exit 1
+fi
+cal_acc
+if [ $? -ne 0 ]; then
+    echo "calculate accuracy failed"
+    exit 1
+fi
\ No newline at end of file
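
Editorial note, not part of the patch above: as a usage illustration, the README placeholders can be filled with the defaults this patch writes into `default_config.yaml` (`./checkpoint/Unet3d-10-110.ckpt`, `unet3d`, `MINDIR`). The resulting `unet3d.mindir` path and the working directories below are assumptions, not values taken from the patch.

```shell
# Hypothetical end-to-end run, assuming it starts from the model root directory
# and that ./checkpoint/Unet3d-10-110.ckpt (the default ckpt_file) exists.
python export.py --ckpt_file ./checkpoint/Unet3d-10-110.ckpt --file_name unet3d --file_format MINDIR
cd scripts
# Preprocess the eval data, build the 310 executable, run inference on device 0, then compute the dice.
bash run_infer_310.sh ../unet3d.mindir y 0
```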
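
A further sketch of where the intermediate and final artifacts land, assuming `run_infer_310.sh` was launched from the `scripts` directory as above; the directory and file names come from the scripts in this patch:

```shell
ls preprocess_Result/00_data              # sliding-window input .bin files written by preprocess.py
ls result_Files                           # per-window output .bin files written by the 310 executable
cat time_Result/test_perform_static.txt   # average inference time written by main.cc
grep "eval average dice is" acc.log       # final dice written by postprocess.py
```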