forked from mindspore-Ecosystem/mindspore
ascend 310 inference for ssd-resnet50
This commit is contained in:
parent
43c61ff4f0
commit
9b564c70ae
|
@ -16,7 +16,10 @@
|
|||
- [Evaluation Process](#evaluation-process)
|
||||
- [Evaluation on Ascend](#evaluation-on-ascend)
|
||||
- [Performance](#performance)
|
||||
- [Export MindIR](#export-mindir)
|
||||
- [Export Process](#export-process)
|
||||
- [Export](#export)
|
||||
- [Inference Process](#inference-process)
|
||||
- [Inference](#inference)
|
||||
- [Description of Random Situation](#description-of-random-situation)
|
||||
- [ModelZoo Homepage](#modelzoo-homepage)
|
||||
|
||||
|
@ -150,9 +153,11 @@ Then you can run everything just like on ascend.
|
|||
└─ cv
|
||||
└─ ssd
|
||||
├─ README.md # descriptions about SSD
|
||||
├─ ascend310_infer # application for 310 inference
|
||||
├─ scripts
|
||||
├─ run_distribute_train.sh # shell script for distributed on ascend
|
||||
└─ run_eval.sh # shell script for eval on ascend
|
||||
├─ run_eval.sh # shell script for eval on ascend
|
||||
└─ run_infer_310.sh # shell script for 310 inference
|
||||
├─ src
|
||||
├─ __init__.py # init file
|
||||
├─ box_utils.py # bbox utils
|
||||
|
@ -165,6 +170,7 @@ Then you can run everything just like on ascend.
|
|||
├─ eval.py # eval scripts
|
||||
├─ train.py # train scripts
|
||||
├─ export.py # export mindir script
|
||||
├─ postprogress.py # post process for 310 inference
|
||||
└─ mindspore_hub_conf.py # mindspore hub interface
|
||||
```
|
||||
|
||||
|
@ -275,6 +281,47 @@ mAP: 0.32719216721918915
|
|||
|
||||
```
|
||||
|
||||
## [Export Process](#contents)
|
||||
|
||||
### [Export](#contents)
|
||||
|
||||
```shell
|
||||
python export.py --ckpt_file [CKPT_PATH] --device_target [DEVICE_TARGET] --file_format [EXPORT_FORMAT]
|
||||
```
|
||||
|
||||
`EXPORT_FORMAT` should be in ["AIR", "MINDIR"]
|
||||
|
||||
## [Inference Process](#contents)
|
||||
|
||||
### [Inference](#contents)
|
||||
|
||||
Before performing inference, we need to export the model first. The AIR model can only be exported in the Ascend 910 environment; the MINDIR model can be exported in any environment.
|
||||
Currently, batch_size can only be set to 1.
|
||||
|
||||
```shell
|
||||
# Ascend310 inference
|
||||
bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DVPP] [DEVICE_ID]
|
||||
```
|
||||
|
||||
Inference result will be stored in the example path, you can find result like the followings in acc.log.
|
||||
|
||||
```shell
|
||||
Average Precision (AP) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.327
|
||||
Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=100 ] = 0.475
|
||||
Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=100 ] = 0.358
|
||||
Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.115
|
||||
Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.353
|
||||
Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.455
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] = 0.314
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] = 0.485
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.509
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.200
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.554
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.692
|
||||
|
||||
mAP: 0.3266651054070853
|
||||
```
|
||||
|
||||
### [Performance](#contents)
|
||||
|
||||
| 参数 | Ascend |
|
||||
|
|
|
@ -16,7 +16,10 @@
|
|||
- [评估过程](#评估过程)
|
||||
- [Ascend处理器环境评估](#ascend处理器环境评估)
|
||||
- [性能](#性能)
|
||||
- [导出MindIR](#导出MindIR)
|
||||
- [导出过程](#导出过程)
|
||||
- [导出](#导出)
|
||||
- [推理过程](#推理过程)
|
||||
- [推理](#推理)
|
||||
- [随机情况说明](#随机情况说明)
|
||||
- [ModelZoo主页](#modelzoo主页)
|
||||
|
||||
|
@ -112,8 +115,10 @@ sh run_eval.sh [DATASET] [CHECKPOINT_PATH] [DEVICE_ID]
|
|||
└─ cv
|
||||
└─ ssd
|
||||
├─ README.md ## SSD相关说明
|
||||
├─ ascend310_infer ## 实现310推理源代码
|
||||
├─ scripts
|
||||
├─ run_distribute_train.sh ## Ascend分布式shell脚本
|
||||
├─ run_infer_310.sh ## Ascend推理shell脚本
|
||||
└─ run_eval.sh ## Ascend评估shell脚本
|
||||
├─ src
|
||||
├─ __init__.py ## 初始化文件
|
||||
|
@ -125,6 +130,8 @@ sh run_eval.sh [DATASET] [CHECKPOINT_PATH] [DEVICE_ID]
|
|||
├─ lr_schedule.py ## 学习率生成器
|
||||
└─ ssd.py ## SSD架构
|
||||
├─ eval.py ## 评估脚本
|
||||
├─ export.py ## 导出 AIR,MINDIR模型的脚本
|
||||
├─ postprogress.py ## 310推理后处理脚本
|
||||
├─ train.py ## 训练脚本
|
||||
└─ mindspore_hub_conf.py ## MindSpore Hub接口
|
||||
```
|
||||
|
@ -233,6 +240,39 @@ mAP: 0.32719216721918915
|
|||
|
||||
```
|
||||
|
||||
## 导出过程
|
||||
|
||||
### 导出
|
||||
|
||||
```shell
|
||||
python export.py --ckpt_file [CKPT_PATH] --device_target [DEVICE_TARGET] --file_format [EXPORT_FORMAT]
|
||||
```
|
||||
|
||||
`EXPORT_FORMAT`可选 ["AIR", "MINDIR"]
|
||||
|
||||
## 推理过程
|
||||
|
||||
### 推理
|
||||
|
||||
在执行推理之前我们需要先导出模型。Air模型只能在昇腾910环境上导出,mindir可以在任意环境上导出。batch_size只支持1。
|
||||
|
||||
```shell
|
||||
Average Precision (AP) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.327
|
||||
Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=100 ] = 0.475
|
||||
Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=100 ] = 0.358
|
||||
Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.115
|
||||
Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.353
|
||||
Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.455
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] = 0.314
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] = 0.485
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.509
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.200
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.554
|
||||
Average Recall (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.692
|
||||
|
||||
mAP: 0.3266651054070853
|
||||
```
|
||||
|
||||
### 性能
|
||||
|
||||
| 参数 | Ascend |
|
||||
|
@ -245,12 +285,6 @@ mAP: 0.32719216721918915
|
|||
| mAP | IoU=0.50: 32.7% |
|
||||
| 模型大小 | 281M(.ckpt文件) |
|
||||
|
||||
## 导出MindIR
|
||||
|
||||
```shell
|
||||
python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --file_format [FILE_FORMAT]
|
||||
```
|
||||
|
||||
参数ckpt_file为必填项,
|
||||
`EXPORT_FORMAT` 必须在 ["AIR", "MINDIR"]中选择。
|
||||
|
||||
|
|
|
@ -22,20 +22,20 @@
|
|||
#include <iosfwd>
|
||||
#include <vector>
|
||||
#include <fstream>
|
||||
#include <sstream>
|
||||
|
||||
#include "include/api/model.h"
|
||||
#include "include/api/context.h"
|
||||
#include "include/api/types.h"
|
||||
#include "include/api/serialization.h"
|
||||
#include "include/minddata/dataset/include/vision_ascend.h"
|
||||
#include "include/minddata/dataset/include/execute.h"
|
||||
#include "include/minddata/dataset/include/vision.h"
|
||||
#include "include/dataset/vision_ascend.h"
|
||||
#include "include/dataset/execute.h"
|
||||
#include "include/dataset/vision.h"
|
||||
#include "inc/utils.h"
|
||||
|
||||
using mindspore::GlobalContext;
|
||||
using mindspore::Context;
|
||||
using mindspore::Serialization;
|
||||
using mindspore::Model;
|
||||
using mindspore::ModelContext;
|
||||
using mindspore::Status;
|
||||
using mindspore::ModelType;
|
||||
using mindspore::GraphCell;
|
||||
|
@ -64,21 +64,24 @@ int main(int argc, char **argv) {
|
|||
return 1;
|
||||
}
|
||||
|
||||
GlobalContext::SetGlobalDeviceTarget(mindspore::kDeviceTypeAscend310);
|
||||
GlobalContext::SetGlobalDeviceID(FLAGS_device_id);
|
||||
auto graph = Serialization::LoadModel(FLAGS_mindir_path, ModelType::kMindIR);
|
||||
auto model_context = std::make_shared<mindspore::ModelContext>();
|
||||
auto context = std::make_shared<Context>();
|
||||
auto ascend310 = std::make_shared<mindspore::Ascend310DeviceInfo>();
|
||||
ascend310->SetDeviceID(FLAGS_device_id);
|
||||
ascend310->SetBufferOptimizeMode("off_optimize");
|
||||
context->MutableDeviceInfo().push_back(ascend310);
|
||||
mindspore::Graph graph;
|
||||
Serialization::Load(FLAGS_mindir_path, ModelType::kMindIR, &graph);
|
||||
if (FLAGS_cpu_dvpp == "DVPP") {
|
||||
if (RealPath(FLAGS_aipp_path).empty()) {
|
||||
std::cout << "Invalid aipp path" << std::endl;
|
||||
return 1;
|
||||
} else {
|
||||
ModelContext::SetInsertOpConfigPath(model_context, FLAGS_aipp_path);
|
||||
ascend310->SetInsertOpConfigPath(FLAGS_aipp_path);
|
||||
}
|
||||
}
|
||||
|
||||
Model model(GraphCell(graph), model_context);
|
||||
Status ret = model.Build();
|
||||
Model model;
|
||||
Status ret = model.Build(GraphCell(graph), context);
|
||||
if (ret != kSuccess) {
|
||||
std::cout << "ERROR: Build failed." << std::endl;
|
||||
return 1;
|
||||
|
@ -142,7 +145,7 @@ int main(int argc, char **argv) {
|
|||
}
|
||||
double average = 0.0;
|
||||
int inferCount = 0;
|
||||
char tmpCh[256] = {0};
|
||||
|
||||
for (auto iter = costTime_map.begin(); iter != costTime_map.end(); iter++) {
|
||||
double diff = 0.0;
|
||||
diff = iter->second - iter->first;
|
||||
|
@ -150,12 +153,12 @@ int main(int argc, char **argv) {
|
|||
inferCount++;
|
||||
}
|
||||
average = average / inferCount;
|
||||
snprintf(tmpCh, sizeof(tmpCh), \
|
||||
"NN inference cost average time: %4.3f ms of infer_count %d \n", average, inferCount);
|
||||
std::stringstream timeCost;
|
||||
timeCost << "NN inference cost average time: "<< average << " ms of infer_count " << inferCount << std::endl;
|
||||
std::cout << "NN inference cost average time: "<< average << "ms of infer_count " << inferCount << std::endl;
|
||||
std::string fileName = "./time_Result" + std::string("/test_perform_static.txt");
|
||||
std::ofstream fileStream(fileName.c_str(), std::ios::trunc);
|
||||
fileStream << tmpCh;
|
||||
fileStream << timeCost.str();
|
||||
fileStream.close();
|
||||
costTime_map.clear();
|
||||
return 0;
|
||||
|
|
|
@ -0,0 +1,105 @@
|
|||
#!/bin/bash
|
||||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
# Require MINDIR_PATH, DATA_PATH and DVPP; DEVICE_ID is an optional 4th argument.
if [ $# -lt 3 ] || [ $# -gt 4 ]; then
    echo "Usage: bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DVPP] [DEVICE_ID]
DVPP is mandatory, and must choose from [DVPP|CPU], it's case-insensitive
DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero"
    exit 1
fi
|
||||
|
||||
# Resolve $1 to an absolute path.
# Absolute paths (leading '/') are echoed unchanged; relative paths are
# resolved against $PWD. 'realpath -m' tolerates components that do not
# exist yet. Arguments are quoted so paths containing spaces survive
# (the original passed $PWD/$1 unquoted, which word-split such paths).
get_real_path(){
    if [ "${1:0:1}" == "/" ]; then
        echo "$1"
    else
        realpath -m "$PWD/$1"
    fi
}
|
||||
# Resolve input arguments to absolute paths (quoted so paths with spaces work).
model=$(get_real_path "$1")
data_path=$(get_real_path "$2")
# Upper-case the mode so DVPP/CPU matching is case-insensitive (bash 4+ ^^).
DVPP=${3^^}

# Optional 4th argument selects the Ascend device; default is device 0.
device_id=0
if [ $# == 4 ]; then
    device_id=$4
fi

echo "mindir name: $model"
echo "dataset path: $data_path"
echo "image process mode: $DVPP"
echo "device id: $device_id"

# Ascend toolchain environment: prefer the ascend-toolkit layout when it is
# installed, otherwise fall back to the flat /usr/local/Ascend layout.
export ASCEND_HOME=/usr/local/Ascend/
if [ -d "${ASCEND_HOME}/ascend-toolkit" ]; then
    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
    export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
    export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
else
    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
    export ASCEND_OPP_PATH=$ASCEND_HOME/opp
fi
|
||||
|
||||
# Build the Ascend 310 inference application; compiler output is captured
# in ../ascend310_infer/build.log.
compile_app()
{
    cd ../ascend310_infer || exit
    bash build.sh &> build.log
}
|
||||
|
||||
# Run the 310 inference binary from the scripts directory.
# DVPP mode feeds 640x640 inputs through the hardware decoder using the aipp
# config; CPU mode uses 300x300 software-decoded inputs. Binary output is
# captured in infer.log; per-image results go to ./result_Files and timing
# stats to ./time_Result. Variable expansions are quoted (the original left
# them unquoted, which breaks on paths containing spaces).
infer()
{
    cd - || exit
    # Start from clean output directories (rm -rf is a no-op when absent,
    # so the original -d guards were redundant).
    rm -rf ./result_Files ./time_Result
    mkdir result_Files
    mkdir time_Result
    if [ "$DVPP" == "DVPP" ]; then
        ../ascend310_infer/out/main --mindir_path="$model" --dataset_path="$data_path" --device_id="$device_id" --cpu_dvpp="$DVPP" --aipp_path=../ascend310_infer/aipp.cfg --image_height=640 --image_width=640 &> infer.log
    elif [ "$DVPP" == "CPU" ]; then
        ../ascend310_infer/out/main --mindir_path="$model" --dataset_path="$data_path" --cpu_dvpp="$DVPP" --device_id="$device_id" --image_height=300 --image_width=300 &> infer.log
    else
        echo "image process mode must be in [DVPP|CPU]"
        exit 1
    fi
}
|
||||
|
||||
# Score the inference outputs with the post-processing script; results are
# written to acc.log. Runs in the foreground: the original backgrounded the
# python process with a trailing '&', so the caller's exit-status check always
# saw success and the script could finish before acc.log was complete.
cal_acc()
{
    python3.7 ../postprocess.py --result_path=./result_Files --img_path="$data_path" --drop &> acc.log
}
|
||||
|
||||
# Pipeline driver: build, run inference, then score — abort on first failure.
if ! compile_app; then
    echo "compile app code failed"
    exit 1
fi
if ! infer; then
    echo " execute inference failed"
    exit 1
fi
if ! cal_acc; then
    echo "calculate accuracy failed"
    exit 1
fi
|
Loading…
Reference in New Issue