From 3730213c4e66e99849ad7c0ad8c848c7b11b11b7 Mon Sep 17 00:00:00 2001 From: yuzhenhua Date: Wed, 28 Apr 2021 15:56:29 +0800 Subject: [PATCH] 310 inference for inceptionv3 and inceptionv4 --- .../cv/ctpn/ascend310_infer/inc/utils.h | 6 +- .../cv/ctpn/ascend310_infer/src/utils.cc | 6 +- .../cv/deeptext/ascend310_infer/inc/utils.h | 6 +- .../cv/deeptext/ascend310_infer/src/utils.cc | 6 +- model_zoo/official/cv/faster_rcnn/README.md | 2 +- .../official/cv/faster_rcnn/README_CN.md | 2 +- model_zoo/official/cv/inceptionv3/README.md | 32 ++++ .../official/cv/inceptionv3/README_CN.md | 34 +++- .../ascend310_infer/CMakeLists.txt | 14 ++ .../cv/inceptionv3/ascend310_infer/build.sh | 23 +++ .../inceptionv3/ascend310_infer/inc/utils.h | 32 ++++ .../inceptionv3/ascend310_infer/src/main.cc | 152 ++++++++++++++++++ .../inceptionv3/ascend310_infer/src/utils.cc | 130 +++++++++++++++ model_zoo/official/cv/inceptionv3/export.py | 5 +- .../official/cv/inceptionv3/postprocess.py | 58 +++++++ .../cv/inceptionv3/scripts/run_infer_310.sh | 107 ++++++++++++ model_zoo/official/cv/inceptionv4/README.md | 32 ++++ .../ascend310_infer/CMakeLists.txt | 14 ++ .../cv/inceptionv4/ascend310_infer/build.sh | 23 +++ .../inceptionv4/ascend310_infer/inc/utils.h | 32 ++++ .../inceptionv4/ascend310_infer/src/main.cc | 152 ++++++++++++++++++ .../inceptionv4/ascend310_infer/src/utils.cc | 130 +++++++++++++++ model_zoo/official/cv/inceptionv4/export.py | 5 +- .../official/cv/inceptionv4/postprocess.py | 58 +++++++ .../cv/inceptionv4/scripts/run_infer_310.sh | 104 ++++++++++++ 25 files changed, 1146 insertions(+), 19 deletions(-) create mode 100644 model_zoo/official/cv/inceptionv3/ascend310_infer/CMakeLists.txt create mode 100644 model_zoo/official/cv/inceptionv3/ascend310_infer/build.sh create mode 100644 model_zoo/official/cv/inceptionv3/ascend310_infer/inc/utils.h create mode 100644 model_zoo/official/cv/inceptionv3/ascend310_infer/src/main.cc create mode 100644 model_zoo/official/cv/inceptionv3/ascend310_infer/src/utils.cc create mode 100644 model_zoo/official/cv/inceptionv3/postprocess.py create mode 100755 model_zoo/official/cv/inceptionv3/scripts/run_infer_310.sh create mode 100644 model_zoo/official/cv/inceptionv4/ascend310_infer/CMakeLists.txt create mode 100644 model_zoo/official/cv/inceptionv4/ascend310_infer/build.sh create mode 100644 model_zoo/official/cv/inceptionv4/ascend310_infer/inc/utils.h create mode 100644 model_zoo/official/cv/inceptionv4/ascend310_infer/src/main.cc create mode 100644 model_zoo/official/cv/inceptionv4/ascend310_infer/src/utils.cc create mode 100644 model_zoo/official/cv/inceptionv4/postprocess.py create mode 100755 model_zoo/official/cv/inceptionv4/scripts/run_infer_310.sh diff --git a/model_zoo/official/cv/ctpn/ascend310_infer/inc/utils.h b/model_zoo/official/cv/ctpn/ascend310_infer/inc/utils.h index efebe03a8c1..abeb8fcbf11 100644 --- a/model_zoo/official/cv/ctpn/ascend310_infer/inc/utils.h +++ b/model_zoo/official/cv/ctpn/ascend310_infer/inc/utils.h @@ -1,12 +1,12 @@ /** * Copyright 2021 Huawei Technologies Co., Ltd - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/model_zoo/official/cv/ctpn/ascend310_infer/src/utils.cc b/model_zoo/official/cv/ctpn/ascend310_infer/src/utils.cc index e753dafa896..8b7737c91d6 100644 --- a/model_zoo/official/cv/ctpn/ascend310_infer/src/utils.cc +++ b/model_zoo/official/cv/ctpn/ascend310_infer/src/utils.cc @@ -1,12 +1,12 @@ /** * Copyright 2021 Huawei Technologies Co., Ltd - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/model_zoo/official/cv/deeptext/ascend310_infer/inc/utils.h b/model_zoo/official/cv/deeptext/ascend310_infer/inc/utils.h index efebe03a8c1..abeb8fcbf11 100644 --- a/model_zoo/official/cv/deeptext/ascend310_infer/inc/utils.h +++ b/model_zoo/official/cv/deeptext/ascend310_infer/inc/utils.h @@ -1,12 +1,12 @@ /** * Copyright 2021 Huawei Technologies Co., Ltd - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/model_zoo/official/cv/deeptext/ascend310_infer/src/utils.cc b/model_zoo/official/cv/deeptext/ascend310_infer/src/utils.cc index b509c57f823..1e545f0d139 100644 --- a/model_zoo/official/cv/deeptext/ascend310_infer/src/utils.cc +++ b/model_zoo/official/cv/deeptext/ascend310_infer/src/utils.cc @@ -1,12 +1,12 @@ /** * Copyright 2021 Huawei Technologies Co., Ltd - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/model_zoo/official/cv/faster_rcnn/README.md b/model_zoo/official/cv/faster_rcnn/README.md index 120246acc6c..9d58387d90b 100644 --- a/model_zoo/official/cv/faster_rcnn/README.md +++ b/model_zoo/official/cv/faster_rcnn/README.md @@ -344,7 +344,7 @@ python export.py --ckpt_file [CKPT_PATH] --device_target [DEVICE_TARGET] --file_ ### Usage -Before performing inference, the air file must bu exported by export script on the Ascend910 environment. +Before performing inference, the model file must be exported by export script on the Ascend910 environment. 
```shell # Ascend310 inference diff --git a/model_zoo/official/cv/faster_rcnn/README_CN.md b/model_zoo/official/cv/faster_rcnn/README_CN.md index ae4ad6bb83a..e352d51fc87 100644 --- a/model_zoo/official/cv/faster_rcnn/README_CN.md +++ b/model_zoo/official/cv/faster_rcnn/README_CN.md @@ -216,7 +216,7 @@ sh run_infer_310.sh [AIR_PATH] [DATA_PATH] [ANN_FILE_PATH] [DEVICE_ID] ├─lr_schedule.py // 学习率生成器 ├─network_define.py // Faster R-CNN网络定义 └─util.py // 例行操作 - ├─export.py // 导出 AIR,MINDIR,ONNX模型的脚本 + ├─export.py // 导出 AIR,MINDIR模型的脚本 ├─eval.py // 评估脚本 ├─postprogress.py // 310推理后处理脚本 └─train.py // 训练脚本 diff --git a/model_zoo/official/cv/inceptionv3/README.md b/model_zoo/official/cv/inceptionv3/README.md index f94b48fda5b..0695dd864b7 100644 --- a/model_zoo/official/cv/inceptionv3/README.md +++ b/model_zoo/official/cv/inceptionv3/README.md @@ -74,12 +74,14 @@ For FP16 operators, if the input data type is FP32, the backend of MindSpore wil . └─Inception-v3 ├─README.md + ├─ascend310_infer # application for 310 inference ├─scripts ├─run_standalone_train_cpu.sh # launch standalone training with cpu platform ├─run_standalone_train_gpu.sh # launch standalone training with gpu platform(1p) ├─run_distribute_train_gpu.sh # launch distributed training with gpu platform(8p) ├─run_standalone_train.sh # launch standalone training with ascend platform(1p) ├─run_distribute_train.sh # launch distributed training with ascend platform(8p) + ├─run_infer_310.sh # shell script for 310 inference ├─run_eval_cpu.sh # launch evaluation with cpu platform ├─run_eval_gpu.sh # launch evaluation with gpu platform └─run_eval.sh # launch evaluating with ascend platform @@ -91,6 +93,7 @@ For FP16 operators, if the input data type is FP32, the backend of MindSpore wil ├─lr_generator.py # learning rate generator ├─eval.py # eval net ├─export.py # convert checkpoint + ├─postprogress.py # post process for 310 inference └─train.py # train net ``` @@ -238,6 +241,35 @@ Evaluation result will be stored in the example path, you can find result like t metric: {'Loss': 1.778, 'Top1-Acc':0.788, 'Top5-Acc':0.942} ``` +## Model Export + +```shell +python export.py --ckpt_file [CKPT_PATH] --device_target [DEVICE_TARGET] --file_format[EXPORT_FORMAT] +``` + +`EXPORT_FORMAT` should be in ["AIR", "MINDIR"] + +## Inference Process + +### Usage + +Before performing inference, the model file must be exported by export script on the Ascend910 environment. + +```shell +# Ascend310 inference +sh run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [ANN_FILE] [DEVICE_ID] +``` + +-NOTE: Ascend310 inference use Imagenet dataset . The label of the image is the number of folder which is started from 0 after sorting. + +### result + +Inference result is saved in current path, you can find result like this in acc.log file. + +```python +accuracy:78.742 +``` + # [Model description](#contents) ## [Performance](#contents) diff --git a/model_zoo/official/cv/inceptionv3/README_CN.md b/model_zoo/official/cv/inceptionv3/README_CN.md index b2e7cb3bbf6..ac0daf2d152 100644 --- a/model_zoo/official/cv/inceptionv3/README_CN.md +++ b/model_zoo/official/cv/inceptionv3/README_CN.md @@ -85,12 +85,14 @@ InceptionV3的总体网络架构如下: . 
└─Inception-v3 ├─README.md + ├─ascend310_infer # 实现310推理源代码 ├─scripts ├─run_standalone_train_cpu.sh # 启动CPU训练 ├─run_standalone_train_gpu.sh # 启动GPU单机训练(单卡) ├─run_distribute_train_gpu.sh # 启动GPU分布式训练(8卡) ├─run_standalone_train.sh # 启动Ascend单机训练(单卡) ├─run_distribute_train.sh # 启动Ascend分布式训练(8卡) + ├─run_infer_310.sh # Ascend推理shell脚本 ├─run_eval_cpu.sh # 启动CPU评估 ├─run_eval_gpu.sh # 启动GPU评估 └─run_eval.sh # 启动Ascend评估 @@ -101,7 +103,8 @@ InceptionV3的总体网络架构如下: ├─loss.py # 自定义交叉熵损失函数 ├─lr_generator.py # 学习率生成器 ├─eval.py # 评估网络 - ├─export.py # 转换检查点 + ├─export.py # 导出 AIR,MINDIR模型的脚本 + ├─postprogress.py # 310推理后处理脚本 └─train.py # 训练网络 ``` @@ -243,6 +246,35 @@ epoch time: 6358482.104 ms, per step time: 16303.800 ms metric:{'Loss':1.778, 'Top1-Acc':0.788, 'Top5-Acc':0.942} ``` +## 模型导出 + +```shell +python export.py --ckpt_file [CKPT_PATH] --device_target [DEVICE_TARGET] --file_format[EXPORT_FORMAT] +``` + +`EXPORT_FORMAT` 可选 ["AIR", "MINDIR"] + +## 推理过程 + +### 使用方法 + +在推理之前需要在昇腾910环境上完成模型的导出。 + +```shell +# Ascend310 inference +sh run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [ANN_FILE] [DEVICE_ID] +``` + +-注意:310推理使用ImageNet数据集. 图片的标签是将所在文件夹排序后获得的从0开始的编号 + +### 结果 + +推理的结果保存在当前目录下,在acc.log日志文件中可以找到类似以下的结果。 + +```python +accuracy:78.742 +``` + # 模型描述 ## 性能 diff --git a/model_zoo/official/cv/inceptionv3/ascend310_infer/CMakeLists.txt b/model_zoo/official/cv/inceptionv3/ascend310_infer/CMakeLists.txt new file mode 100644 index 00000000000..ee3c8544734 --- /dev/null +++ b/model_zoo/official/cv/inceptionv3/ascend310_infer/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 3.14.1) +project(Ascend310Infer) +add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0) +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 -g -std=c++17 -Werror -Wall -fPIE -Wl,--allow-shlib-undefined") +set(PROJECT_SRC_ROOT ${CMAKE_CURRENT_LIST_DIR}/) +option(MINDSPORE_PATH "mindspore install path" "") +include_directories(${MINDSPORE_PATH}) +include_directories(${MINDSPORE_PATH}/include) +include_directories(${PROJECT_SRC_ROOT}) +find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) +file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) + +add_executable(main src/main.cc src/utils.cc) +target_link_libraries(main ${MS_LIB} ${MD_LIB} gflags) diff --git a/model_zoo/official/cv/inceptionv3/ascend310_infer/build.sh b/model_zoo/official/cv/inceptionv3/ascend310_infer/build.sh new file mode 100644 index 00000000000..770a8851efa --- /dev/null +++ b/model_zoo/official/cv/inceptionv3/ascend310_infer/build.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +if [ ! -d out ]; then + mkdir out +fi +cd out || exit +cmake .. 
\ + -DMINDSPORE_PATH="`pip show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`" +make diff --git a/model_zoo/official/cv/inceptionv3/ascend310_infer/inc/utils.h b/model_zoo/official/cv/inceptionv3/ascend310_infer/inc/utils.h new file mode 100644 index 00000000000..abeb8fcbf11 --- /dev/null +++ b/model_zoo/official/cv/inceptionv3/ascend310_infer/inc/utils.h @@ -0,0 +1,32 @@ +/** + * Copyright 2021 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MINDSPORE_INFERENCE_UTILS_H_ +#define MINDSPORE_INFERENCE_UTILS_H_ + +#include +#include +#include +#include +#include +#include "include/api/types.h" + +std::vector GetAllFiles(std::string_view dirName); +DIR *OpenDir(std::string_view dirName); +std::string RealPath(std::string_view path); +mindspore::MSTensor ReadFileToTensor(const std::string &file); +int WriteResult(const std::string& imageFile, const std::vector &outputs); +#endif diff --git a/model_zoo/official/cv/inceptionv3/ascend310_infer/src/main.cc b/model_zoo/official/cv/inceptionv3/ascend310_infer/src/main.cc new file mode 100644 index 00000000000..4797523755b --- /dev/null +++ b/model_zoo/official/cv/inceptionv3/ascend310_infer/src/main.cc @@ -0,0 +1,152 @@ +/** + * Copyright 2021 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "../inc/utils.h" +#include "include/dataset/execute.h" +#include "include/dataset/transforms.h" +#include "include/dataset/vision.h" +#include "include/dataset/vision_ascend.h" +#include "include/api/types.h" +#include "include/api/model.h" +#include "include/api/serialization.h" +#include "include/api/context.h" + +using mindspore::Serialization; +using mindspore::Model; +using mindspore::Context; +using mindspore::Status; +using mindspore::ModelType; +using mindspore::Graph; +using mindspore::GraphCell; +using mindspore::kSuccess; +using mindspore::MSTensor; +using mindspore::DataType; +using mindspore::dataset::Execute; +using mindspore::dataset::TensorTransform; +using mindspore::dataset::vision::Decode; +using mindspore::dataset::vision::Resize; +using mindspore::dataset::vision::CenterCrop; +using mindspore::dataset::vision::Normalize; +using mindspore::dataset::vision::HWC2CHW; + +using mindspore::dataset::transforms::TypeCast; + + +DEFINE_string(model_path, "", "model path"); +DEFINE_string(dataset_path, ".", "dataset path"); +DEFINE_int32(device_id, 0, "device id"); + +int main(int argc, char **argv) { + gflags::ParseCommandLineFlags(&argc, &argv, true); + if (RealPath(FLAGS_model_path).empty()) { + std::cout << "Invalid model" << std::endl; + return 1; + } + + auto context = std::make_shared(); + auto ascend310_info = std::make_shared(); + ascend310_info->SetDeviceID(FLAGS_device_id); + context->MutableDeviceInfo().push_back(ascend310_info); + + Graph graph; + Status ret = Serialization::Load(FLAGS_model_path, ModelType::kMindIR, &graph); + if (ret != kSuccess) { + std::cout << "Load model failed." << std::endl; + return 1; + } + + Model model; + ret = model.Build(GraphCell(graph), context); + if (ret != kSuccess) { + std::cout << "ERROR: Build failed." << std::endl; + return 1; + } + + std::vector modelInputs = model.GetInputs(); + + auto all_files = GetAllFiles(FLAGS_dataset_path); + if (all_files.empty()) { + std::cout << "ERROR: no input data." 
<< std::endl; + return 1; + } + + std::shared_ptr decode(new Decode()); + std::shared_ptr resize(new Resize({299})); + std::shared_ptr centerCrop(new CenterCrop({299})); + std::shared_ptr normalize(new Normalize({123.675, 116.28, 103.53}, {58.395, 57.12, 57.375})); + std::shared_ptr hwc2chw(new HWC2CHW()); + + mindspore::dataset::Execute transform({decode, resize, centerCrop, normalize, hwc2chw}); + + std::map costTime_map; + + size_t size = all_files.size(); + for (size_t i = 0; i < size; ++i) { + struct timeval start; + struct timeval end; + double startTime_ms; + double endTime_ms; + std::vector inputs; + std::vector outputs; + + std::cout << "Start predict input files:" << all_files[i] << std::endl; + mindspore::MSTensor image = ReadFileToTensor(all_files[i]); + + transform(image, &image); + + inputs.emplace_back(modelInputs[0].Name(), modelInputs[0].DataType(), modelInputs[0].Shape(), + image.Data().get(), image.DataSize()); + + gettimeofday(&start, NULL); + model.Predict(inputs, &outputs); + gettimeofday(&end, NULL); + + startTime_ms = (1.0 * start.tv_sec * 1000000 + start.tv_usec) / 1000; + endTime_ms = (1.0 * end.tv_sec * 1000000 + end.tv_usec) / 1000; + costTime_map.insert(std::pair(startTime_ms, endTime_ms)); + WriteResult(all_files[i], outputs); + } + double average = 0.0; + int infer_cnt = 0; + char tmpCh[256] = {0}; + for (auto iter = costTime_map.begin(); iter != costTime_map.end(); iter++) { + double diff = 0.0; + diff = iter->second - iter->first; + average += diff; + infer_cnt++; + } + + average = average/infer_cnt; + + snprintf(tmpCh, sizeof(tmpCh), "NN inference cost average time: %4.3f ms of infer_count %d\n", average, infer_cnt); + std::cout << "NN inference cost average time: "<< average << "ms of infer_count " << infer_cnt << std::endl; + std::string file_name = "./time_Result" + std::string("/test_perform_static.txt"); + std::ofstream file_stream(file_name.c_str(), std::ios::trunc); + file_stream << tmpCh; + file_stream.close(); + costTime_map.clear(); + return 0; +} diff --git a/model_zoo/official/cv/inceptionv3/ascend310_infer/src/utils.cc b/model_zoo/official/cv/inceptionv3/ascend310_infer/src/utils.cc new file mode 100644 index 00000000000..1e545f0d139 --- /dev/null +++ b/model_zoo/official/cv/inceptionv3/ascend310_infer/src/utils.cc @@ -0,0 +1,130 @@ +/** + * Copyright 2021 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "inc/utils.h" + +#include +#include +#include + +using mindspore::MSTensor; +using mindspore::DataType; + +std::vector GetAllFiles(std::string_view dirName) { + struct dirent *filename; + DIR *dir = OpenDir(dirName); + if (dir == nullptr) { + return {}; + } + std::vector res; + while ((filename = readdir(dir)) != nullptr) { + std::string dName = std::string(filename->d_name); + if (dName == "." || dName == ".." 
|| filename->d_type != DT_REG) { + continue; + } + res.emplace_back(std::string(dirName) + "/" + filename->d_name); + } + std::sort(res.begin(), res.end()); + for (auto &f : res) { + std::cout << "image file: " << f << std::endl; + } + return res; +} + +int WriteResult(const std::string& imageFile, const std::vector &outputs) { + std::string homePath = "./result_Files"; + for (size_t i = 0; i < outputs.size(); ++i) { + size_t outputSize; + std::shared_ptr netOutput; + netOutput = outputs[i].Data(); + outputSize = outputs[i].DataSize(); + int pos = imageFile.rfind('/'); + std::string fileName(imageFile, pos + 1); + fileName.replace(fileName.find('.'), fileName.size() - fileName.find('.'), '_' + std::to_string(i) + ".bin"); + std::string outFileName = homePath + "/" + fileName; + FILE * outputFile = fopen(outFileName.c_str(), "wb"); + fwrite(netOutput.get(), outputSize, sizeof(char), outputFile); + fclose(outputFile); + outputFile = nullptr; + } + return 0; +} + +mindspore::MSTensor ReadFileToTensor(const std::string &file) { + if (file.empty()) { + std::cout << "Pointer file is nullptr" << std::endl; + return mindspore::MSTensor(); + } + + std::ifstream ifs(file); + if (!ifs.good()) { + std::cout << "File: " << file << " is not exist" << std::endl; + return mindspore::MSTensor(); + } + + if (!ifs.is_open()) { + std::cout << "File: " << file << "open failed" << std::endl; + return mindspore::MSTensor(); + } + + ifs.seekg(0, std::ios::end); + size_t size = ifs.tellg(); + mindspore::MSTensor buffer(file, mindspore::DataType::kNumberTypeUInt8, {static_cast(size)}, nullptr, size); + + ifs.seekg(0, std::ios::beg); + ifs.read(reinterpret_cast(buffer.MutableData()), size); + ifs.close(); + + return buffer; +} + + +DIR *OpenDir(std::string_view dirName) { + if (dirName.empty()) { + std::cout << " dirName is null ! " << std::endl; + return nullptr; + } + std::string realPath = RealPath(dirName); + struct stat s; + lstat(realPath.c_str(), &s); + if (!S_ISDIR(s.st_mode)) { + std::cout << "dirName is not a valid directory !" 
<< std::endl; + return nullptr; + } + DIR *dir; + dir = opendir(realPath.c_str()); + if (dir == nullptr) { + std::cout << "Can not open dir " << dirName << std::endl; + return nullptr; + } + std::cout << "Successfully opened the dir " << dirName << std::endl; + return dir; +} + +std::string RealPath(std::string_view path) { + char realPathMem[PATH_MAX] = {0}; + char *realPathRet = nullptr; + realPathRet = realpath(path.data(), realPathMem); + + if (realPathRet == nullptr) { + std::cout << "File: " << path << " is not exist."; + return ""; + } + + std::string realPath(realPathMem); + std::cout << path << " realpath is: " << realPath << std::endl; + return realPath; +} diff --git a/model_zoo/official/cv/inceptionv3/export.py b/model_zoo/official/cv/inceptionv3/export.py index 17845bea8b5..bea3f44be09 100644 --- a/model_zoo/official/cv/inceptionv3/export.py +++ b/model_zoo/official/cv/inceptionv3/export.py @@ -24,9 +24,10 @@ from src.inception_v3 import InceptionV3 parser = argparse.ArgumentParser(description='inceptionv3 export') parser.add_argument("--device_id", type=int, default=0, help="Device id") +parser.add_argument("--batch_size", type=int, default=1, help="batch size") parser.add_argument('--ckpt_file', type=str, required=True, help='inceptionv3 ckpt file.') parser.add_argument('--file_name', type=str, default='inceptionv3', help='inceptionv3 output air name.') -parser.add_argument('--file_format', type=str, choices=["AIR", "ONNX", "MINDIR"], default='AIR', help='file format') +parser.add_argument('--file_format', type=str, choices=["AIR", "MINDIR"], default='AIR', help='file format') parser.add_argument('--width', type=int, default=299, help='input width') parser.add_argument('--height', type=int, default=299, help='input height') parser.add_argument("--device_target", type=str, choices=["Ascend", "GPU", "CPU"], default="Ascend", @@ -42,6 +43,6 @@ if __name__ == '__main__': param_dict = load_checkpoint(args.ckpt_file) load_param_into_net(net, param_dict) - input_arr = Tensor(np.random.uniform(0.0, 1.0, size=[cfg.batch_size, 3, args.width, args.height]), ms.float32) + input_arr = Tensor(np.random.uniform(0.0, 1.0, size=[args.batch_size, 3, args.width, args.height]), ms.float32) export(net, input_arr, file_name=args.file_name, file_format=args.file_format) diff --git a/model_zoo/official/cv/inceptionv3/postprocess.py b/model_zoo/official/cv/inceptionv3/postprocess.py new file mode 100644 index 00000000000..584c70d096e --- /dev/null +++ b/model_zoo/official/cv/inceptionv3/postprocess.py @@ -0,0 +1,58 @@ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ +'''post process for 310 inference''' +import os +import argparse +import numpy as np + +parser = argparse.ArgumentParser(description='fasterrcnn_export') +parser.add_argument("--result_path", type=str, required=True, help="result file path") +parser.add_argument("--label_file", type=str, required=True, help="label file") +args = parser.parse_args() + +def read_label(label_file): + f = open(label_file, "r") + lines = f.readlines() + + img_label = {} + for line in lines: + img_id = line.split(":")[0] + label = line.split(":")[1] + img_label[img_id] = label + + return img_label + +def cal_acc(result_path, label_file): + step = 0 + sum_a = 0 + img_label = read_label(label_file) + + files = os.listdir(result_path) + for file in files: + full_file_path = os.path.join(result_path, file) + if os.path.isfile(full_file_path): + result = np.fromfile(full_file_path, dtype=np.float32).reshape(1, 1000) + pred = np.argmax(result, axis=1) + step = step + 1 + if pred == int(img_label[file[:-6]]): + sum_a = sum_a + 1 + + print("========step:{}========".format(step)) + print("========sum:{}========".format(sum_a)) + accuracy = sum_a * 100.0 / step + print("========accuracy:{}========".format(accuracy)) + +if __name__ == "__main__": + cal_acc(args.result_path, args.label_file) diff --git a/model_zoo/official/cv/inceptionv3/scripts/run_infer_310.sh b/model_zoo/official/cv/inceptionv3/scripts/run_infer_310.sh new file mode 100755 index 00000000000..1230e319fa9 --- /dev/null +++ b/model_zoo/official/cv/inceptionv3/scripts/run_infer_310.sh @@ -0,0 +1,107 @@ +#!/bin/bash +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +if [[ $# -lt 3 || $# -gt 4 ]]; then + echo "Usage: sh run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [LABEL_FILE] [DEVICE_ID] + DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero" +exit 1 +fi + +get_real_path(){ + if [ "${1:0:1}" == "/" ]; then + echo "$1" + else + echo "$(realpath -m $PWD/$1)" + fi +} + +model=$(get_real_path $1) +data_path=$(get_real_path $2) +label_file=$(get_real_path $3) +if [ $# == 4 ]; then + device_id=$4 +elif [ $# == 3 ]; then + if [ -z $device_id ]; then + device_id=0 + else + device_id=$device_id + fi +fi + +echo $model +echo $data_path +echo $label_file +echo $device_id + +export ASCEND_HOME=/usr/local/Ascend/ +if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then + export PATH=$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH + export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH + export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe + export PYTHONPATH=${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH + export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp +else + export PATH=$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH + export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH + export PYTHONPATH=$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH + export ASCEND_OPP_PATH=$ASCEND_HOME/opp +fi + +function compile_app() +{ + cd ../ascend310_infer || exit + if [ -f "Makefile" ]; then + make clean + fi + sh build.sh &> build.log + + if [ $? -ne 0 ]; then + echo "compile app code failed" + exit 1 + fi + cd - || exit +} + +function infer() +{ + if [ -d result_Files ]; then + rm -rf ./result_Files + fi + if [ -d time_Result ]; then + rm -rf ./time_Result + fi + mkdir result_Files + mkdir time_Result + ../ascend310_infer/out/main --model_path=$model --dataset_path=$data_path --device_id=$device_id &> infer.log + + if [ $? -ne 0 ]; then + echo "execute inference failed" + exit 1 + fi +} + +function cal_acc() +{ + python ../postprocess.py --label_file=$label_file --result_path=result_Files &> acc.log + if [ $? -ne 0 ]; then + echo "calculate accuracy failed" + exit 1 + fi +} + +compile_app +infer +cal_acc diff --git a/model_zoo/official/cv/inceptionv4/README.md b/model_zoo/official/cv/inceptionv4/README.md index e1c0dd9fe98..fa7c9297dc5 100644 --- a/model_zoo/official/cv/inceptionv4/README.md +++ b/model_zoo/official/cv/inceptionv4/README.md @@ -67,11 +67,13 @@ For FP16 operators, if the input data type is FP32, the backend of MindSpore wil . 
└─Inception-v4 ├─README.md + ├─ascend310_infer # application for 310 inference ├─scripts ├─run_distribute_train_gpu.sh # launch distributed training with gpu platform(8p) ├─run_eval_gpu.sh # launch evaluating with gpu platform ├─run_standalone_train_ascend.sh # launch standalone training with ascend platform(1p) ├─run_distribute_train_ascend.sh # launch distributed training with ascend platform(8p) + ├─run_infer_310.sh # shell script for 310 inference └─run_eval_ascend.sh # launch evaluating with ascend platform ├─src ├─config.py # parameter configuration @@ -80,6 +82,7 @@ For FP16 operators, if the input data type is FP32, the backend of MindSpore wil └─callback.py # eval callback function ├─eval.py # eval net ├─export.py # export checkpoint, surpport .onnx, .air, .mindir convert + ├─postprogress.py # post process for 310 inference └─train.py # train net ``` @@ -223,6 +226,35 @@ metric: {'Loss': 0.9849, 'Top1-Acc':0.7985, 'Top5-Acc':0.9460} metric: {'Loss': 0.8144, 'Top1-Acc': 0.8009, 'Top5-Acc': 0.9457} ``` +## Model Export + +```shell +python export.py --ckpt_file [CKPT_PATH] --device_target [DEVICE_TARGET] --file_format[EXPORT_FORMAT] +``` + +`EXPORT_FORMAT` should be in ["AIR", "MINDIR"] + +## Inference Process + +### Usage + +Before performing inference, the model file must be exported by export script on the Ascend910 environment. + +```shell +# Ascend310 inference +sh run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [ANN_FILE] [DEVICE_ID] +``` + +-NOTE:Ascend310 inference use Imagenet dataset . The label of the image is the number of folder which is started from 0 after sorting. + +### result + +Inference result is saved in current path, you can find result like this in acc.log file. + +```python +accuracy:80.044 +``` + # [Model description](#contents) ## [Performance](#contents) diff --git a/model_zoo/official/cv/inceptionv4/ascend310_infer/CMakeLists.txt b/model_zoo/official/cv/inceptionv4/ascend310_infer/CMakeLists.txt new file mode 100644 index 00000000000..ee3c8544734 --- /dev/null +++ b/model_zoo/official/cv/inceptionv4/ascend310_infer/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 3.14.1) +project(Ascend310Infer) +add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0) +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 -g -std=c++17 -Werror -Wall -fPIE -Wl,--allow-shlib-undefined") +set(PROJECT_SRC_ROOT ${CMAKE_CURRENT_LIST_DIR}/) +option(MINDSPORE_PATH "mindspore install path" "") +include_directories(${MINDSPORE_PATH}) +include_directories(${MINDSPORE_PATH}/include) +include_directories(${PROJECT_SRC_ROOT}) +find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) +file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) + +add_executable(main src/main.cc src/utils.cc) +target_link_libraries(main ${MS_LIB} ${MD_LIB} gflags) diff --git a/model_zoo/official/cv/inceptionv4/ascend310_infer/build.sh b/model_zoo/official/cv/inceptionv4/ascend310_infer/build.sh new file mode 100644 index 00000000000..770a8851efa --- /dev/null +++ b/model_zoo/official/cv/inceptionv4/ascend310_infer/build.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +if [ ! -d out ]; then + mkdir out +fi +cd out || exit +cmake .. \ + -DMINDSPORE_PATH="`pip show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`" +make diff --git a/model_zoo/official/cv/inceptionv4/ascend310_infer/inc/utils.h b/model_zoo/official/cv/inceptionv4/ascend310_infer/inc/utils.h new file mode 100644 index 00000000000..abeb8fcbf11 --- /dev/null +++ b/model_zoo/official/cv/inceptionv4/ascend310_infer/inc/utils.h @@ -0,0 +1,32 @@ +/** + * Copyright 2021 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MINDSPORE_INFERENCE_UTILS_H_ +#define MINDSPORE_INFERENCE_UTILS_H_ + +#include +#include +#include +#include +#include +#include "include/api/types.h" + +std::vector GetAllFiles(std::string_view dirName); +DIR *OpenDir(std::string_view dirName); +std::string RealPath(std::string_view path); +mindspore::MSTensor ReadFileToTensor(const std::string &file); +int WriteResult(const std::string& imageFile, const std::vector &outputs); +#endif diff --git a/model_zoo/official/cv/inceptionv4/ascend310_infer/src/main.cc b/model_zoo/official/cv/inceptionv4/ascend310_infer/src/main.cc new file mode 100644 index 00000000000..4797523755b --- /dev/null +++ b/model_zoo/official/cv/inceptionv4/ascend310_infer/src/main.cc @@ -0,0 +1,152 @@ +/** + * Copyright 2021 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "../inc/utils.h" +#include "include/dataset/execute.h" +#include "include/dataset/transforms.h" +#include "include/dataset/vision.h" +#include "include/dataset/vision_ascend.h" +#include "include/api/types.h" +#include "include/api/model.h" +#include "include/api/serialization.h" +#include "include/api/context.h" + +using mindspore::Serialization; +using mindspore::Model; +using mindspore::Context; +using mindspore::Status; +using mindspore::ModelType; +using mindspore::Graph; +using mindspore::GraphCell; +using mindspore::kSuccess; +using mindspore::MSTensor; +using mindspore::DataType; +using mindspore::dataset::Execute; +using mindspore::dataset::TensorTransform; +using mindspore::dataset::vision::Decode; +using mindspore::dataset::vision::Resize; +using mindspore::dataset::vision::CenterCrop; +using mindspore::dataset::vision::Normalize; +using mindspore::dataset::vision::HWC2CHW; + +using mindspore::dataset::transforms::TypeCast; + + +DEFINE_string(model_path, "", "model path"); +DEFINE_string(dataset_path, ".", "dataset path"); +DEFINE_int32(device_id, 0, "device id"); + +int main(int argc, char **argv) { + gflags::ParseCommandLineFlags(&argc, &argv, true); + if (RealPath(FLAGS_model_path).empty()) { + std::cout << "Invalid model" << std::endl; + return 1; + } + + auto context = std::make_shared(); + auto ascend310_info = std::make_shared(); + ascend310_info->SetDeviceID(FLAGS_device_id); + context->MutableDeviceInfo().push_back(ascend310_info); + + Graph graph; + Status ret = Serialization::Load(FLAGS_model_path, ModelType::kMindIR, &graph); + if (ret != kSuccess) { + std::cout << "Load model failed." << std::endl; + return 1; + } + + Model model; + ret = model.Build(GraphCell(graph), context); + if (ret != kSuccess) { + std::cout << "ERROR: Build failed." << std::endl; + return 1; + } + + std::vector modelInputs = model.GetInputs(); + + auto all_files = GetAllFiles(FLAGS_dataset_path); + if (all_files.empty()) { + std::cout << "ERROR: no input data." 
<< std::endl; + return 1; + } + + std::shared_ptr decode(new Decode()); + std::shared_ptr resize(new Resize({299})); + std::shared_ptr centerCrop(new CenterCrop({299})); + std::shared_ptr normalize(new Normalize({123.675, 116.28, 103.53}, {58.395, 57.12, 57.375})); + std::shared_ptr hwc2chw(new HWC2CHW()); + + mindspore::dataset::Execute transform({decode, resize, centerCrop, normalize, hwc2chw}); + + std::map costTime_map; + + size_t size = all_files.size(); + for (size_t i = 0; i < size; ++i) { + struct timeval start; + struct timeval end; + double startTime_ms; + double endTime_ms; + std::vector inputs; + std::vector outputs; + + std::cout << "Start predict input files:" << all_files[i] << std::endl; + mindspore::MSTensor image = ReadFileToTensor(all_files[i]); + + transform(image, &image); + + inputs.emplace_back(modelInputs[0].Name(), modelInputs[0].DataType(), modelInputs[0].Shape(), + image.Data().get(), image.DataSize()); + + gettimeofday(&start, NULL); + model.Predict(inputs, &outputs); + gettimeofday(&end, NULL); + + startTime_ms = (1.0 * start.tv_sec * 1000000 + start.tv_usec) / 1000; + endTime_ms = (1.0 * end.tv_sec * 1000000 + end.tv_usec) / 1000; + costTime_map.insert(std::pair(startTime_ms, endTime_ms)); + WriteResult(all_files[i], outputs); + } + double average = 0.0; + int infer_cnt = 0; + char tmpCh[256] = {0}; + for (auto iter = costTime_map.begin(); iter != costTime_map.end(); iter++) { + double diff = 0.0; + diff = iter->second - iter->first; + average += diff; + infer_cnt++; + } + + average = average/infer_cnt; + + snprintf(tmpCh, sizeof(tmpCh), "NN inference cost average time: %4.3f ms of infer_count %d\n", average, infer_cnt); + std::cout << "NN inference cost average time: "<< average << "ms of infer_count " << infer_cnt << std::endl; + std::string file_name = "./time_Result" + std::string("/test_perform_static.txt"); + std::ofstream file_stream(file_name.c_str(), std::ios::trunc); + file_stream << tmpCh; + file_stream.close(); + costTime_map.clear(); + return 0; +} diff --git a/model_zoo/official/cv/inceptionv4/ascend310_infer/src/utils.cc b/model_zoo/official/cv/inceptionv4/ascend310_infer/src/utils.cc new file mode 100644 index 00000000000..1e545f0d139 --- /dev/null +++ b/model_zoo/official/cv/inceptionv4/ascend310_infer/src/utils.cc @@ -0,0 +1,130 @@ +/** + * Copyright 2021 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "inc/utils.h" + +#include +#include +#include + +using mindspore::MSTensor; +using mindspore::DataType; + +std::vector GetAllFiles(std::string_view dirName) { + struct dirent *filename; + DIR *dir = OpenDir(dirName); + if (dir == nullptr) { + return {}; + } + std::vector res; + while ((filename = readdir(dir)) != nullptr) { + std::string dName = std::string(filename->d_name); + if (dName == "." || dName == ".." 
|| filename->d_type != DT_REG) { + continue; + } + res.emplace_back(std::string(dirName) + "/" + filename->d_name); + } + std::sort(res.begin(), res.end()); + for (auto &f : res) { + std::cout << "image file: " << f << std::endl; + } + return res; +} + +int WriteResult(const std::string& imageFile, const std::vector &outputs) { + std::string homePath = "./result_Files"; + for (size_t i = 0; i < outputs.size(); ++i) { + size_t outputSize; + std::shared_ptr netOutput; + netOutput = outputs[i].Data(); + outputSize = outputs[i].DataSize(); + int pos = imageFile.rfind('/'); + std::string fileName(imageFile, pos + 1); + fileName.replace(fileName.find('.'), fileName.size() - fileName.find('.'), '_' + std::to_string(i) + ".bin"); + std::string outFileName = homePath + "/" + fileName; + FILE * outputFile = fopen(outFileName.c_str(), "wb"); + fwrite(netOutput.get(), outputSize, sizeof(char), outputFile); + fclose(outputFile); + outputFile = nullptr; + } + return 0; +} + +mindspore::MSTensor ReadFileToTensor(const std::string &file) { + if (file.empty()) { + std::cout << "Pointer file is nullptr" << std::endl; + return mindspore::MSTensor(); + } + + std::ifstream ifs(file); + if (!ifs.good()) { + std::cout << "File: " << file << " is not exist" << std::endl; + return mindspore::MSTensor(); + } + + if (!ifs.is_open()) { + std::cout << "File: " << file << "open failed" << std::endl; + return mindspore::MSTensor(); + } + + ifs.seekg(0, std::ios::end); + size_t size = ifs.tellg(); + mindspore::MSTensor buffer(file, mindspore::DataType::kNumberTypeUInt8, {static_cast(size)}, nullptr, size); + + ifs.seekg(0, std::ios::beg); + ifs.read(reinterpret_cast(buffer.MutableData()), size); + ifs.close(); + + return buffer; +} + + +DIR *OpenDir(std::string_view dirName) { + if (dirName.empty()) { + std::cout << " dirName is null ! " << std::endl; + return nullptr; + } + std::string realPath = RealPath(dirName); + struct stat s; + lstat(realPath.c_str(), &s); + if (!S_ISDIR(s.st_mode)) { + std::cout << "dirName is not a valid directory !" 
<< std::endl; + return nullptr; + } + DIR *dir; + dir = opendir(realPath.c_str()); + if (dir == nullptr) { + std::cout << "Can not open dir " << dirName << std::endl; + return nullptr; + } + std::cout << "Successfully opened the dir " << dirName << std::endl; + return dir; +} + +std::string RealPath(std::string_view path) { + char realPathMem[PATH_MAX] = {0}; + char *realPathRet = nullptr; + realPathRet = realpath(path.data(), realPathMem); + + if (realPathRet == nullptr) { + std::cout << "File: " << path << " is not exist."; + return ""; + } + + std::string realPath(realPathMem); + std::cout << path << " realpath is: " << realPath << std::endl; + return realPath; +} diff --git a/model_zoo/official/cv/inceptionv4/export.py b/model_zoo/official/cv/inceptionv4/export.py index 0131287e506..a5b3fa8fca0 100644 --- a/model_zoo/official/cv/inceptionv4/export.py +++ b/model_zoo/official/cv/inceptionv4/export.py @@ -25,9 +25,10 @@ from src.inceptionv4 import Inceptionv4 parser = argparse.ArgumentParser(description='inceptionv4 export') parser.add_argument("--device_id", type=int, default=0, help="Device id") +parser.add_argument("--batch_size", type=int, default=1, help="batch size") parser.add_argument('--ckpt_file', type=str, required=True, help='inceptionv4 ckpt file.') parser.add_argument('--file_name', type=str, default='inceptionv4', help='inceptionv4 output air name.') -parser.add_argument('--file_format', type=str, choices=["AIR", "ONNX", "MINDIR"], default='AIR', help='file format') +parser.add_argument('--file_format', type=str, choices=["AIR", "MINDIR"], default='AIR', help='file format') parser.add_argument('--width', type=int, default=299, help='input width') parser.add_argument('--height', type=int, default=299, help='input height') parser.add_argument("--device_target", type=str, choices=["Ascend", "GPU", "CPU"], default="Ascend", @@ -43,5 +44,5 @@ if __name__ == '__main__': param_dict = load_checkpoint(args.ckpt_file) load_param_into_net(net, param_dict) - input_arr = Tensor(np.ones([config.batch_size, 3, args.width, args.height]), ms.float32) + input_arr = Tensor(np.ones([args.batch_size, 3, args.width, args.height]), ms.float32) export(net, input_arr, file_name=args.file_name, file_format=args.file_format) diff --git a/model_zoo/official/cv/inceptionv4/postprocess.py b/model_zoo/official/cv/inceptionv4/postprocess.py new file mode 100644 index 00000000000..0b032de2b3f --- /dev/null +++ b/model_zoo/official/cv/inceptionv4/postprocess.py @@ -0,0 +1,58 @@ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ +'''post process for 310 inference''' +import os +import argparse +import numpy as np + +parser = argparse.ArgumentParser(description='fasterrcnn_export') +parser.add_argument("--result_path", type=str, required=True, help="result file path") +parser.add_argument("--label_file", type=str, required=True, help="label file") +args = parser.parse_args() + +def read_label(label_file): + f = open(label_file, "r") + lines = f.readlines() + + img_label = {} + for line in lines: + img_id = line.split(":")[0] + label = line.split(":")[1] + img_label[img_id] = label + + return img_label + +def cal_acc(result_path, label_file): + step = 0 + sum_a = 0 + img_label = read_label(label_file) + + files = os.listdir(result_path) + for file in files: + full_file_path = os.path.join(result_path, file) + if os.path.isfile(full_file_path): + result = np.fromfile(full_file_path, dtype=np.float32).reshape(1, 1000) + pred = np.argmax(result, axis=1) + step = step + 1 + if pred == int(img_label[file[:-6]]): + sum_a = sum_a + 1 + + print("========step:{}========".format(step)) + print("========sum:{}========".format(sum_a)) + accuracy = sum_a * 100.0 / step + print("========accuraty:{}========".format(accuracy)) + +if __name__ == "__main__": + cal_acc(args.result_path, args.label_file) diff --git a/model_zoo/official/cv/inceptionv4/scripts/run_infer_310.sh b/model_zoo/official/cv/inceptionv4/scripts/run_infer_310.sh new file mode 100755 index 00000000000..faa08fba0c1 --- /dev/null +++ b/model_zoo/official/cv/inceptionv4/scripts/run_infer_310.sh @@ -0,0 +1,104 @@ +#!/bin/bash +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +if [[ $# -lt 3 || $# -gt 4 ]]; then + echo "Usage: bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [LABEL_FILE] [DEVICE_ID] + DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero" +exit 1 +fi + +get_real_path(){ + if [ "${1:0:1}" == "/" ]; then + echo "$1" + else + echo "$(realpath -m $PWD/$1)" + fi +} + +model=$(get_real_path $1) +data_path=$(get_real_path $2) +label_file=$(get_real_path $3) + +device_id=0 + +if [ $# == 4 ]; then + device_id=$4 +fi + +echo $model +echo $data_path +echo $label_file +echo $device_id + +export ASCEND_HOME=/usr/local/Ascend/ +if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then + export PATH=$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH + export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH + export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe + export PYTHONPATH=${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH + export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp +else + export PATH=$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH + export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH + export PYTHONPATH=$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH + export ASCEND_OPP_PATH=$ASCEND_HOME/opp +fi + +function compile_app() +{ + cd ../ascend310_infer || exit + if [ -f "Makefile" ]; then + make clean + fi + sh build.sh &> build.log + + if [ $? -ne 0 ]; then + echo "compile app code failed" + exit 1 + fi + cd - || exit +} + +function infer() +{ + if [ -d result_Files ]; then + rm -rf ./result_Files + fi + if [ -d time_Result ]; then + rm -rf ./time_Result + fi + mkdir result_Files + mkdir time_Result + ../ascend310_infer/out/main --model_path=$model --dataset_path=$data_path --device_id=$device_id &> infer.log + + if [ $? -ne 0 ]; then + echo "execute inference failed" + exit 1 + fi +} + +function cal_acc() +{ + python ../postprocess.py --label_file=$label_file --result_path=result_Files &> acc.log + if [ $? -ne 0 ]; then + echo "calculate accuracy failed" + exit 1 + fi +} + +compile_app +infer +cal_acc
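
Note on preparing the inference inputs: the `run_infer_310.sh` scripts added above take a label file whose format is only implied by `postprocess.py` — one `image_id:label` pair per line, where `image_id` is the image file name without its extension (the result `.bin` files are keyed by `file[:-6]`, i.e. with the trailing `_0.bin` stripped) and `label` is the index of the image's class folder after sorting, starting from 0, as the READMEs state. `main.cc` in turn reads every regular file from a single flat directory. The sketch below shows one way such a flat directory and label file could be prepared from an ImageNet-style validation set; it is an illustrative helper, not part of this patch, and its name and arguments are assumptions.

```python
# generate_label_file.py -- illustrative helper, not part of this patch.
# Assumes an ImageNet-style layout: <val_dir>/<class_folder>/<image>.JPEG
import argparse
import os
import shutil


def main():
    parser = argparse.ArgumentParser(description="prepare flat image dir and label file for 310 inference")
    parser.add_argument("--val_dir", type=str, required=True, help="ImageNet-style validation directory")
    parser.add_argument("--output_dir", type=str, required=True, help="flat image directory passed as [DATA_PATH]")
    parser.add_argument("--label_file", type=str, required=True, help="output label file passed as [LABEL_FILE]")
    args = parser.parse_args()

    os.makedirs(args.output_dir, exist_ok=True)

    # The label of each image is the index of its class folder after sorting,
    # starting from 0, matching the note in the READMEs added by this patch.
    class_folders = sorted(d for d in os.listdir(args.val_dir)
                           if os.path.isdir(os.path.join(args.val_dir, d)))

    with open(args.label_file, "w") as f:
        for label, folder in enumerate(class_folders):
            folder_path = os.path.join(args.val_dir, folder)
            for image_name in sorted(os.listdir(folder_path)):
                # main.cc reads every regular file in one flat directory,
                # so copy (or symlink) each image out of its class folder.
                shutil.copy(os.path.join(folder_path, image_name),
                            os.path.join(args.output_dir, image_name))
                # postprocess.py keys its label dict by the result file name with
                # the trailing "_0.bin" stripped; WriteResult truncates the image
                # name at its first '.', so use the same rule here.
                image_id = image_name.split(".", 1)[0]
                f.write("{}:{}\n".format(image_id, label))


if __name__ == "__main__":
    main()
```

Hypothetical usage: `python generate_label_file.py --val_dir ./val --output_dir ./preprocess_images --label_file ./label.txt`, after which `./preprocess_images` and `./label.txt` can be passed as the [DATA_PATH] and [LABEL_FILE] arguments of `run_infer_310.sh`.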