add run script && fix bug

zhujingxuan 2021-03-11 14:38:28 +08:00
parent 9ca5cd3129
commit 5ff85ea8a4
4 changed files with 148 additions and 15 deletions

View File

@@ -20,7 +20,7 @@
 #include "coder/opcoders/serializers/nnacl_serializer/nnacl_fp32_serializer.h"
 #include "coder/opcoders/file_collector.h"
-using mindspore::schema::PrimitiveType_PowFusion;
+using mindspore::schema::PrimitiveType_ReduceFusion;
 namespace mindspore::lite::micro::nnacl {
 int ReduceFP32Coder::Prepare(CoderContext *const context) {
@@ -116,6 +116,6 @@ int ReduceFP32Coder::DoCode(CoderContext *const context) {
   return RET_OK;
 }
-REG_OPERATOR_CODER(kAllTargets, kNumberTypeFloat32, PrimitiveType_PowFusion, CPUOpCoderCreator<ReduceFP32Coder>)
+REG_OPERATOR_CODER(kAllTargets, kNumberTypeFloat32, PrimitiveType_ReduceFusion, CPUOpCoderCreator<ReduceFP32Coder>)
 } // namespace mindspore::lite::micro::nnacl
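
The one-token fix above matters because REG_OPERATOR_CODER keys the creator by (target, data type, primitive type): registered under PrimitiveType_PowFusion, ReduceFP32Coder is never found for ReduceFusion nodes. A toy lookup, assuming a plain map keyed by primitive type (not the real micro registry), makes the failure mode concrete:

// Toy registry for illustration only; the real REG_OPERATOR_CODER machinery differs.
#include <functional>
#include <iostream>
#include <map>
#include <string>

enum PrimitiveType { PrimitiveType_PowFusion, PrimitiveType_ReduceFusion };
using CoderCreator = std::function<std::string()>;

int main() {
  std::map<PrimitiveType, CoderCreator> registry;

  // Old registration: ReduceFP32Coder filed under PowFusion.
  registry[PrimitiveType_PowFusion] = [] { return std::string("ReduceFP32Coder"); };
  std::cout << "ReduceFusion coder found: " << std::boolalpha
            << (registry.count(PrimitiveType_ReduceFusion) > 0) << std::endl;  // false

  // Fixed registration from this commit: filed under ReduceFusion.
  registry[PrimitiveType_ReduceFusion] = [] { return std::string("ReduceFP32Coder"); };
  std::cout << "ReduceFusion coder found: "
            << (registry.count(PrimitiveType_ReduceFusion) > 0) << std::endl;  // true
  return 0;
}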

View File

@@ -22,15 +22,19 @@
 namespace mindspore::lite::micro::nnacl {
 void NNaclFp32Serializer::CodeStruct(const std::string &name, const PoolingParameter &pooling_parameter) {
-  CodeBaseStruct("PoolingParameter", name, pooling_parameter.op_parameter_, pooling_parameter.pool_mode_,
-                 pooling_parameter.round_mode_, pooling_parameter.act_type_, pooling_parameter.avg_mode_,
+  CodeBaseStruct("PoolingParameter", name,
+                 // Primitive parameter
+                 pooling_parameter.op_parameter_, pooling_parameter.pool_mode_, pooling_parameter.round_mode_,
+                 pooling_parameter.pad_mode_, pooling_parameter.act_type_, pooling_parameter.avg_mode_,
                  pooling_parameter.global_, pooling_parameter.window_w_, pooling_parameter.window_h_,
-                 pooling_parameter.stride_w_, pooling_parameter.stride_h_, pooling_parameter.input_w_,
-                 pooling_parameter.input_w_, pooling_parameter.input_batch_, pooling_parameter.input_channel_,
-                 pooling_parameter.output_w_, pooling_parameter.output_h_, pooling_parameter.output_batch_,
-                 pooling_parameter.output_channel_, pooling_parameter.pad_u_, pooling_parameter.pad_d_,
-                 pooling_parameter.pad_l_, pooling_parameter.pad_r_, pooling_parameter.thread_num_, "NULL",
-                 pooling_parameter.quantize_);
+                 pooling_parameter.stride_w_, pooling_parameter.stride_h_,
+                 // shape correlative
+                 pooling_parameter.input_w_, pooling_parameter.input_h_, pooling_parameter.input_batch_,
+                 pooling_parameter.input_channel_, pooling_parameter.output_w_, pooling_parameter.output_h_,
+                 pooling_parameter.output_batch_, pooling_parameter.output_channel_, pooling_parameter.pad_u_,
+                 pooling_parameter.pad_d_, pooling_parameter.pad_l_, pooling_parameter.pad_r_,
+                 // other parameter
+                 pooling_parameter.thread_num_, nullptr, pooling_parameter.quantize_);
 }
 void NNaclFp32Serializer::CodeStruct(const std::string &name, const BatchNormParameter &batch_norm_parameter) {
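
CodeBaseStruct passes these arguments positionally, so the call has to mirror nnacl's PoolingParameter field order exactly; the old call skipped pad_mode_ and emitted input_w_ twice, shifting every later field by one. Below is a sketch of the layout implied by the argument names and grouping in the fixed call, with simplified stand-in types; the authoritative definition is in nnacl's pooling_parameter.h, not here.

// Reconstructed sketch of the field order implied by the fixed call above; types are assumptions.
typedef int PoolMode, RoundMode, PadMode, ActType;           // stand-ins for the nnacl enums
typedef struct { int type_; int thread_num_; } OpParameter;  // trimmed stand-in
typedef struct {
  // Primitive parameter
  OpParameter op_parameter_;
  PoolMode pool_mode_;
  RoundMode round_mode_;
  PadMode pad_mode_;
  ActType act_type_;
  int avg_mode_;
  bool global_;
  int window_w_;
  int window_h_;
  int stride_w_;
  int stride_h_;
  // shape correlative
  int input_w_;
  int input_h_;
  int input_batch_;
  int input_channel_;
  int output_w_;
  int output_h_;
  int output_batch_;
  int output_channel_;
  int pad_u_;
  int pad_d_;
  int pad_l_;
  int pad_r_;
  // other parameter
  int thread_num_;
  void *quant_args_;  // slot filled with nullptr (fp32) or the quant struct name (int8); type assumed
  bool quantize_;
} PoolingParameter;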

View File

@@ -106,15 +106,19 @@ void NNaclInt8Serializer::CodeStruct(const std::string &name, const PoolingParam
      << " &" << in_quant_name << ", "
      << " &" << out_quant_name << "};\n";
-  CodeBaseStruct("PoolingParameter", name, pooling_parameter.op_parameter_, pooling_parameter.pool_mode_,
-                 pooling_parameter.round_mode_, pooling_parameter.pad_mode_, pooling_parameter.act_type_,
-                 pooling_parameter.avg_mode_, pooling_parameter.global_, pooling_parameter.window_w_,
-                 pooling_parameter.window_h_, pooling_parameter.stride_w_, pooling_parameter.stride_h_,
+  CodeBaseStruct("PoolingParameter", name,
+                 // Primitive parameter
+                 pooling_parameter.op_parameter_, pooling_parameter.pool_mode_, pooling_parameter.round_mode_,
+                 pooling_parameter.pad_mode_, pooling_parameter.act_type_, pooling_parameter.avg_mode_,
+                 pooling_parameter.global_, pooling_parameter.window_w_, pooling_parameter.window_h_,
+                 pooling_parameter.stride_w_, pooling_parameter.stride_h_,
+                 // shape correlative
                  pooling_parameter.input_w_, pooling_parameter.input_h_, pooling_parameter.input_batch_,
                  pooling_parameter.input_channel_, pooling_parameter.output_w_, pooling_parameter.output_h_,
                  pooling_parameter.output_batch_, pooling_parameter.output_channel_, pooling_parameter.pad_u_,
                  pooling_parameter.pad_d_, pooling_parameter.pad_l_, pooling_parameter.pad_r_,
-                 pooling_parameter.op_parameter_.thread_num_, quant_name, pooling_parameter.quantize_);
+                 // other parameter
+                 pooling_parameter.thread_num_, quant_name, pooling_parameter.quantize_);
 }
 void NNaclInt8Serializer::CodeStruct(const std::string &name, const SoftmaxParameter &softmax_parameter) {
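
The int8 serializer gets the same reordering, with quant_name referring to the quant struct emitted just above and the thread count now read from pooling_parameter.thread_num_ instead of op_parameter_.thread_num_. CodeBaseStruct itself is not shown in this diff, so the stand-in below only assumes the essential behavior: stream each argument, in order, into a positional brace-initializer in the generated source, which is why a missing or duplicated argument silently corrupts every field after it.

// Minimal stand-in for illustration: NOT MindSpore's real CodeBaseStruct, just the idea of
// streaming arguments, in order, into a generated positional brace-initializer.
#include <iostream>
#include <sstream>
#include <string>

template <typename T>
void AppendFields(std::ostringstream &os, const T &last) {
  os << last;
}

template <typename T, typename... Rest>
void AppendFields(std::ostringstream &os, const T &first, const Rest &...rest) {
  os << first << ", ";
  AppendFields(os, rest...);
}

template <typename... Fields>
std::string CodeStructSketch(const std::string &type, const std::string &name, const Fields &...fields) {
  std::ostringstream code;
  code << type << " " << name << " = {";
  AppendFields(code, fields...);
  code << "};";
  return code.str();
}

int main() {
  // Prints: PoolingParameter pool_param = {0, 2, 2, 224, 224, 4};
  std::cout << CodeStructSketch("PoolingParameter", "pool_param", 0, 2, 2, 224, 224, 4) << std::endl;
  return 0;
}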

View File

@@ -0,0 +1,125 @@
#!/bin/bash
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
set -e
CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
MINDSPORE_ROOT_DIR=${CURRENT_DIR%%/mindspore/lite/micro/example/mobilenetv2_quant}
OUTPUT_DIR=${1:-${MINDSPORE_ROOT_DIR}/output}
THREAD_NUM=${2:-32}
MODULE_NAME=mobilenetv2_quant
OUTPUT_IR=Reshape-64.ir
CALIB_OUT=${CURRENT_DIR}/Reshape-64.out
echo "current dir is: ${CURRENT_DIR}"
echo "packed output dir is :${OUTPUT_DIR}"
if [ ! -d "${OUTPUT_DIR}" ]; then
echo "folder ${OUTPUT_DIR} does not exist"
return 1
fi
# remove the workspace if it already exists
WORKSPACE=${CURRENT_DIR}/build
rm -rf ${WORKSPACE}
mkdir ${WORKSPACE} || exit 1
PROJECT_DIR=${WORKSPACE}/${MODULE_NAME}
compare_output() {
  local OUTPUT_FILE=$1
  local CALIB_FILE=$2
  if [[ ! -f "${OUTPUT_FILE}" || ! -f "${CALIB_FILE}" ]]; then
    echo "file ${OUTPUT_FILE}, ${CALIB_FILE} does not exist, pwd $(pwd)"
    exit 1
  fi
  lines=$(wc -l < "${CALIB_FILE}")
  for ((i = 1; i <= lines; i++)); do
    line1=$(awk -v line="${i}" 'NR == line' "${CALIB_FILE}")
    line2=$(awk -v line="${i}" 'NR == line' "${OUTPUT_FILE}")
    if [[ "${line1}" != "${line2}" ]]; then
      echo -e "file ${OUTPUT_FILE}, ${CALIB_FILE}, compare failed! line: ${i}"
      exit 1
    fi
  done
  echo -e "compare success, ${OUTPUT_FILE}, ${CALIB_FILE}"
}
# cp oplib and codegen
cp ${OUTPUT_DIR}/mindspore-lite-*-codegen-linux-x64.tar.gz ${WORKSPACE}/ || exit 1
cd ${WORKSPACE} || exit 1
tar -zxf mindspore-lite-*-codegen-linux-x64.tar.gz || exit 1
cd mindspore-lite-*-codegen-linux-x64 || exit 1
mv operator_library/ ${WORKSPACE}/ || exit 1
mv codegen ${WORKSPACE}/ || exit 1
cd -
rm -r mindspore-lite-*-codegen-linux-x64 || exit 1
rm mindspore-lite-*-codegen-linux-x64.tar.gz || exit 1
# convert model
cp ${OUTPUT_DIR}/mindspore-lite-*-converter-linux-x64.tar.gz ${WORKSPACE}/ || exit 1
cd ${WORKSPACE} || exit 1
tar -zxf mindspore-lite-*-converter-linux-x64.tar.gz || exit 1
rm mindspore-lite-*-converter-linux-x64.tar.gz || exit 1
cd mindspore-lite-*-converter-linux-x64 || exit 1
export LD_LIBRARY_PATH=./lib/:./third_party/protobuf/lib:./third_party/flatbuffers/lib:./third_party/glog/lib
converter/converter_lite --fmk=TFLITE \
--modelFile=${CURRENT_DIR}/mobilenet_v2_1.0_224_quant.tflite \
--outputFile=${WORKSPACE}/mobilenet_v2
cd -
rm -rf mindspore-lite-*-converter-linux-x64 || exit 1
# generate code
${WORKSPACE}/codegen --modelPath=${WORKSPACE}/mobilenet_v2.ms \
--moduleName=${MODULE_NAME} \
--isWeightFile=true \
--debugMode=true
rm codegen
if [ ! -d "${PROJECT_DIR}" ]; then
echo "folder ${PROJECT_DIR} does not exist"
return 1
fi
cd ${PROJECT_DIR} || exit 1
# 1. build static lib.a
echo -e "building static library"
mkdir -p src/build && cd src/build || exit 1
OP_HEADER_PATH=${WORKSPACE}/operator_library/include
OP_LIB=${WORKSPACE}/operator_library/lib/x86/libops.a
echo "Head Path: ${OP_HEADER_PATH}"
echo "Lib Path: ${OP_LIB}"
cmake -DCMAKE_BUILD_TYPE=Debug \
-DOP_LIB=${OP_LIB} \
-DOP_HEADER_PATH=${OP_HEADER_PATH} ..
make -j${THREAD_NUM}
# 2. build benchmark
cd ${PROJECT_DIR}/benchmark && mkdir -p build && cd build || exit 1
cmake -DMODEL_LIB="${PROJECT_DIR}/src/build/libnet.a" ..
make -j${THREAD_NUM}
echo "net file: ${PROJECT_DIR}/src/${MODULE_NAME}.net"
# 3. run benchmark
./benchmark ${CURRENT_DIR}/input_1_224_224_3_uint8.bin ${PROJECT_DIR}/src/${MODULE_NAME}.net
compare_output ${OUTPUT_IR} ${CALIB_OUT}
RET=$?
if [[ "${RET}" -eq 0 ]]; then
echo -e "run benchmark success: ${MODULE_NAME}"
else
echo -e "run benchmark failed: ${MODULE_NAME}"
exit 1
fi