!3620 add mindspore lite

Merge pull request !3620 from 张学同/to_merge
mindspore-ci-bot 2020-07-29 10:54:41 +08:00 committed by Gitee
commit b75943f220
885 changed files with 97615 additions and 300 deletions

16
.gitignore vendored

@ -4,6 +4,20 @@ mindspore/lib
output output
*.ir *.ir
# flatbuffer
mindspore/lite/tools/converter/parser/tflite/schema_generated.h
mindspore/lite/tools/converter/parser/caffe/caffe.pb.cc
mindspore/lite/tools/converter/parser/caffe/caffe.pb.h
mindspore/lite/tools/converter/parser/onnx/onnx.pb.cc
mindspore/lite/tools/converter/parser/onnx/onnx.pb.h
mindspore/lite/tools/converter/schema/*.h
mindspore/lite/tools/converter/schema/inner
mindspore/lite/schema/*.h
mindspore/lite/schema/inner
mindspore/lite/src/runtime/kernel/opencl/cl/fp16/*.inc
mindspore/lite/src/runtime/kernel/opencl/cl/fp32/*.inc
# Cmake files # Cmake files
CMakeFiles/ CMakeFiles/
cmake_install.cmake cmake_install.cmake
@ -71,5 +85,3 @@ test_temp_summary_event_file/
mindspore/version.py mindspore/version.py
mindspore/default_config.py mindspore/default_config.py
mindspore/.commit_id mindspore/.commit_id
onnx.proto
mindspore/ccsrc/onnx.proto

8
.gitmodules vendored

@ -1,6 +1,7 @@
[submodule "third_party/flatbuffers"] [submodule "third_party/flatbuffers"]
path = third_party/flatbuffers path = third_party/flatbuffers
url = https://github.com/google/flatbuffers.git url = https://github.com/google/flatbuffers.git
ignore = all
[submodule "third_party/googletest"] [submodule "third_party/googletest"]
path = third_party/googletest path = third_party/googletest
url = https://github.com/google/googletest.git url = https://github.com/google/googletest.git
@ -10,9 +11,16 @@
[submodule "third_party/protobuf"] [submodule "third_party/protobuf"]
path = third_party/protobuf path = third_party/protobuf
url = https://github.com/protocolbuffers/protobuf.git url = https://github.com/protocolbuffers/protobuf.git
ignore = all
[submodule "akg"] [submodule "akg"]
path = akg path = akg
url = https://gitee.com/mindspore/akg.git url = https://gitee.com/mindspore/akg.git
[submodule "graphengine"] [submodule "graphengine"]
path = graphengine path = graphengine
url = https://gitee.com/mindspore/graphengine.git url = https://gitee.com/mindspore/graphengine.git
[submodule "third_party/OpenCL-CLHPP"]
path = third_party/OpenCL-CLHPP
url = https://github.com/KhronosGroup/OpenCL-CLHPP.git
[submodule "third_party/OpenCL-Headers"]
path = third_party/OpenCL-Headers
url = https://github.com/KhronosGroup/OpenCL-Headers.git
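The two new OpenCL submodules are what the lite GPU build pulls in; fetching them by hand would look roughly like the line below, which mirrors what build_opencl in build.sh does later in this change:

git submodule update --init third_party/OpenCL-Headers third_party/OpenCL-CLHPP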

376
build.sh

@ -25,7 +25,8 @@ usage()
echo "Usage:" echo "Usage:"
echo "bash build.sh [-d] [-r] [-v] [-c on|off] [-t on|off] [-g on|off] [-h] [-b ge] [-m infer|train] \\" echo "bash build.sh [-d] [-r] [-v] [-c on|off] [-t on|off] [-g on|off] [-h] [-b ge] [-m infer|train] \\"
echo " [-a on|off] [-Q on|off] [-p on|off] [-i] [-L] [-R] [-D on|off] [-j[n]] [-e gpu|d|cpu] \\" echo " [-a on|off] [-Q on|off] [-p on|off] [-i] [-L] [-R] [-D on|off] [-j[n]] [-e gpu|d|cpu] \\"
echo " [-P on|off] [-z [on|off]] [-M on|off] [-V 9.2|10.1] [-I] [-K] [-B on|off] [-w on|off] [-E] [-l on|off]" echo " [-P on|off] [-z [on|off]] [-M on|off] [-V 9.2|10.1] [-I arm64|arm32|x86_64] [-K] \\"
echo " [-B on|off] [-w on|off] [-E] [-l on|off]"
echo "" echo ""
echo "Options:" echo "Options:"
echo " -d Debug mode" echo " -d Debug mode"
@ -51,7 +52,7 @@ usage()
echo " -z Compile dataset & mindrecord, default on" echo " -z Compile dataset & mindrecord, default on"
echo " -M Enable MPI and NCCL for GPU training, gpu default on" echo " -M Enable MPI and NCCL for GPU training, gpu default on"
echo " -V Specify the minimum required cuda version, default CUDA 10.1" echo " -V Specify the minimum required cuda version, default CUDA 10.1"
echo " -I Compile predict, default off" echo " -I Compile lite"
echo " -K Compile with AKG, default on" echo " -K Compile with AKG, default on"
echo " -s Enable serving module, default off" echo " -s Enable serving module, default off"
echo " -w Enable acl module, default off" echo " -w Enable acl module, default off"
@ -93,9 +94,10 @@ checkopts()
COMPILE_MINDDATA="on" COMPILE_MINDDATA="on"
ENABLE_MPI="off" ENABLE_MPI="off"
CUDA_VERSION="10.1" CUDA_VERSION="10.1"
COMPILE_PREDICT="off" COMPILE_LITE="off"
LITE_PLATFORM=""
SUPPORT_TRAIN="off"
USE_GLOG="on" USE_GLOG="on"
PREDICT_PLATFORM=""
ENABLE_AKG="on" ENABLE_AKG="on"
ENABLE_SERVING="off" ENABLE_SERVING="off"
ENABLE_ACL="off" ENABLE_ACL="off"
@ -240,13 +242,16 @@ checkopts()
fi fi
;; ;;
I) I)
COMPILE_PREDICT="on" COMPILE_LITE="on"
if [[ "$OPTARG" == "arm64" ]]; then if [[ "$OPTARG" == "arm64" ]]; then
PREDICT_PLATFORM="arm64" LITE_PLATFORM="arm64"
elif [[ "$OPTARG" == "arm32" ]]; then
LITE_PLATFORM="arm32"
elif [[ "$OPTARG" == "x86_64" ]]; then elif [[ "$OPTARG" == "x86_64" ]]; then
PREDICT_PLATFORM="x86_64" ENABLE_CONVERTER="on"
LITE_PLATFORM="x86_64"
else else
echo "-I parameter must be arm64 or x86_64" echo "-I parameter must be arm64、arm32 or x86_64"
exit 1 exit 1
fi fi
;; ;;
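For orientation, the reworked -I option is what selects a MindSpore Lite build from the top-level script. Hypothetical invocations (the NDK path and thread counts are assumptions; the arm targets additionally require ANDROID_NDK to be exported, as checkndk below enforces):

export ANDROID_NDK=/opt/android-ndk-r20b   # assumed install path, needed for arm64/arm32
bash build.sh -I arm64 -j8                 # cross-compile the lite runtime for arm64
bash build.sh -I x86_64 -j8                # host build; also switches the converter on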
@ -382,128 +387,247 @@ build_mindspore()
echo "success to build mindspore project!" echo "success to build mindspore project!"
} }
build_predict() checkndk() {
{ if [ "${ANDROID_NDK}" ]; then
git submodule update --init --recursive third_party/incubator-tvm echo -e "\e[31mANDROID_NDK_PATH=$ANDROID_NDK \e[0m"
echo "start build predict project"
git submodule update --init --recursive third_party/flatbuffers
git submodule update --init --recursive third_party/googletest
git submodule update --init --recursive third_party/protobuf
rm -rf "${BASEPATH}/predict/build"
mkdir -pv "${BASEPATH}/predict/build"
rm -rf "${BASEPATH}/predict/output"
mkdir -pv "${BASEPATH}/predict/output"
if [[ "$PREDICT_PLATFORM" == "arm64" ]]; then
if [ "${ANDROID_NDK}" ]; then
echo -e "\e[31mANDROID_NDK_PATH=$ANDROID_NDK \e[0m"
else
echo -e "\e[31mplease set ANDROID_NDK_PATH in environment variable for example: export ANDROID_NDK=/root/usr/android-ndk-r16b/ \e[0m"
exit 1
fi
fi
#build flatbuf
cd "${BASEPATH}/third_party/flatbuffers"
rm -rf build && mkdir -p build && cd build && cmake .. && make -j$THREAD_NUM
FLATC="${BASEPATH}"/third_party/flatbuffers/build/flatc
cd "${BASEPATH}"/predict/schema && mkdir -p "${BASEPATH}"/predict/schema/inner
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b --reflect-types --gen-mutable --reflect-names --gen-object-api -o ${BASEPATH}/predict/schema/inner
# check LLVM_PATH
if [ "${LLVM_PATH}" == "" ]; then
echo "Please set LLVM_PATH in env for example export LLVM_PATH=/xxxx/bin/llvm-config"
exit
fi
#build tvm
tvm_open_source="${BASEPATH}/third_party/incubator-tvm"
tvm_kernel_build="${BASEPATH}/predict/module/tvm_kernel"
if [ ! -f "${tvm_kernel_build}"/incubator-tvm/build/libtvm.so ]; then
rm -fr "${tvm_kernel_build}"/incubator-tvm
cp -fr "${tvm_open_source}" "${tvm_kernel_build}"
mkdir -p "${tvm_kernel_build}"/incubator-tvm/build
patch -d "${tvm_kernel_build}"/incubator-tvm -p1 < "${BASEPATH}"/third_party/patch/predict/0001-RetBugFix-CustomRuntime_v06.patch
cp "${tvm_kernel_build}"/lite/src/codegen/llvm/lite_rtfunc_reset.cc "${tvm_kernel_build}"/incubator-tvm/src/codegen/llvm/
cp "${tvm_open_source}"/cmake/config.cmake "${tvm_kernel_build}"/incubator-tvm
if [ "${LLVM_PATH}" ]; then
sed -i "s#set(USE_LLVM .*)#set(USE_LLVM \"${LLVM_PATH}\")#g" "${tvm_kernel_build}"/incubator-tvm/config.cmake
else
echo "need set LLVM_PATH in env for example export LLVM_PATH=/xxxx/bin/llvm-config"
fi
cd "${tvm_kernel_build}"/incubator-tvm/build
cmake ..
make -j$THREAD_NUM
else else
cd "${tvm_kernel_build}"/incubator-tvm/build echo -e "\e[31mplease set ANDROID_NDK_PATH in environment variable for example: export ANDROID_NDK=/root/usr/android-ndk-r20b/ \e[0m"
make -j$THREAD_NUM exit 1
fi fi
#gen op
predict_tvm_op_lib_path="${BASEPATH}/predict/module/tvm_kernel/build/lib_x86"
predict_platform="x86"
if [[ "$PREDICT_PLATFORM" == "arm64" ]]; then
predict_tvm_op_lib_path="${BASEPATH}/predict/module/tvm_kernel/build/lib_arm64"
predict_platform="arm64"
fi
need_get_libs=true
if [ -d "${predict_tvm_op_lib_path}" ]; then
file_list=$(ls "${predict_tvm_op_lib_path}")
if [ -n "${file_list}" ]; then
libstime=$(stat -c %Y "${predict_tvm_op_lib_path}"/* | sort -u | tail -n1)
pythontime=$(find "${BASEPATH}"/predict/module/tvm_kernel/lite/python/ -name "*.py" -exec stat -c %Y {} \; |
sort -u | tail -n1)
if [ "${libstime}" -ge "${pythontime}" ]; then
need_get_libs=false
else
rm -fr "${predict_tvm_op_lib_path}"
fi
fi
fi
if $need_get_libs; then
PYTHONPATH_OLD=${PYTHONPATH}
export PYTHONPATH="${tvm_kernel_build}/incubator-tvm/python:${tvm_kernel_build}/incubator-tvm/topi/python:${tvm_kernel_build}/incubator-tvm/nnvm/python:${tvm_kernel_build}/lite/python:"
cd "${BASEPATH}"/predict/module/tvm_kernel/lite/python/at_ops
python3 at_gen_strip.py ${predict_platform}
export PYTHONPATH=${PYTHONPATH_OLD}
fi
cd "${BASEPATH}/predict/build"
if [[ "$PREDICT_PLATFORM" == "arm64" ]]; then
cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" \
-DANDROID_NATIVE_API_LEVEL=android-19 -DANDROID_NDK="${ANDROID_NDK}" \
-DANDROID_TOOLCHAIN_NAME="aarch64-linux-android-clang" -DANDROID_STL="c++_shared" \
-DANDROID_ABI="arm64-v8a" -DENABLE_PREDICT_ARM64=ON -DANDROID_ALLOW_UNDEFINED_SYMBOLS=TRUE ..
elif [[ "$PREDICT_PLATFORM" == "x86_64" ]]; then
cmake ..
fi
make ${VERBOSE} -j$THREAD_NUM
if [[ "$PREDICT_PLATFORM" == "x86_64" ]]; then
cd "${BASEPATH}/predict/build/test" && ./run_tests.sh
fi
# copy securec include files
mkdir -p "${BASEPATH}/predict/output/include/securec/include"
cp "${BASEPATH}"/third_party/securec/include/* "${BASEPATH}"/predict/output/include/securec/include
cd "${BASEPATH}/predict/output/"
if [[ "$PREDICT_PLATFORM" == "x86_64" ]]; then
tar -cf MSPredict-0.5.0-linux_x86_64.tar.gz include/ lib/ --warning=no-file-changed
elif [[ "$PREDICT_PLATFORM" == "arm64" ]]; then
tar -cf MSPredict-0.5.0-linux_aarch64.tar.gz include/ lib/ --warning=no-file-changed
fi
echo "success to build predict project!"
} }
if [[ "X$COMPILE_PREDICT" = "Xon" ]]; then gene_flatbuffer() {
build_predict FLAT_DIR="${BASEPATH}/mindspore/lite/schema"
echo "---------------- mindspore: build end ----------------" cd ${FLAT_DIR} && rm -rf "${FLAT_DIR}/inner" && mkdir -p "${FLAT_DIR}/inner"
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b --reflect-types --gen-mutable --reflect-names --gen-object-api -o "${FLAT_DIR}/inner"
FLAT_DIR="${BASEPATH}/mindspore/lite/tools/converter/parser/tflite"
cd ${FLAT_DIR}
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b --reflect-types --gen-mutable --reflect-names --gen-object-api -o "${FLAT_DIR}/"
}
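As a rough sketch of what gene_flatbuffer produces per schema (the schema file name below is hypothetical; the real .fbs files live under mindspore/lite/schema and the tflite parser directory):

# flatc emits C++ headers from a FlatBuffers schema; the plain pass yields the
# read-only API, the --gen-object-api pass yields the mutable "inner" variant
# used by the converter.
FLATC=third_party/flatbuffers/build/flatc      # built on demand by build_flatbuffer below
"${FLATC}" -c -b model.fbs                     # -> model_generated.h
"${FLATC}" -c -b --reflect-types --gen-mutable --reflect-names --gen-object-api \
    -o inner model.fbs                         # -> inner/model_generated.h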
build_flatbuffer() {
cd ${BASEPATH}
FLATC="${BASEPATH}"/third_party/flatbuffers/build/flatc
if [[ ! -f "${FLATC}" ]]; then
git submodule update --init --recursive third_party/flatbuffers
cd ${BASEPATH}/third_party/flatbuffers
rm -rf build && mkdir -pv build && cd build && cmake .. && make -j$THREAD_NUM
gene_flatbuffer
fi
if [[ "${INC_BUILD}" == "off" ]]; then
gene_flatbuffer
fi
}
gene_protobuf() {
PROTO_SRC_DIR="${BASEPATH}/mindspore/lite/tools/converter/parser/caffe"
find ${PROTO_SRC_DIR} -name "*.proto" -print0 | xargs -0 "${PROTOC}" -I"${PROTO_SRC_DIR}" --cpp_out="${PROTO_SRC_DIR}"
PROTO_SRC_DIR="${BASEPATH}/mindspore/lite/tools/converter/parser/onnx"
find ${PROTO_SRC_DIR} -name "*.proto" -print0 | xargs -0 "${PROTOC}" -I"${PROTO_SRC_DIR}" --cpp_out="${PROTO_SRC_DIR}"
}
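gene_protobuf does the same for the caffe and onnx parsers with protoc; a minimal sketch, assuming protoc has already been installed into third_party/protobuf/build, which is also why caffe.pb.* and onnx.pb.* are added to .gitignore above:

PROTOC=third_party/protobuf/build/bin/protoc
SRC=mindspore/lite/tools/converter/parser/caffe
"${PROTOC}" -I"${SRC}" --cpp_out="${SRC}" "${SRC}"/caffe.proto   # -> caffe.pb.h, caffe.pb.cc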
build_protobuf() {
cd ${BASEPATH}
PROTOC="${BASEPATH}"/third_party/protobuf/build/bin/protoc
if [[ ! -f "${PROTOC}" ]]; then
git submodule update --init --recursive third_party/protobuf
cd ${BASEPATH}/third_party/protobuf
rm -rf build && mkdir -pv build && ./autogen.sh
./configure --prefix=${BASEPATH}/third_party/protobuf/build
make clean && make -j$THREAD_NUM && make install
gene_protobuf
fi
if [[ "${INC_BUILD}" == "off" ]]; then
gene_protobuf
fi
}
build_gtest() {
cd ${BASEPATH}
git submodule update --init --recursive third_party/googletest
}
gene_clhpp() {
CL_SRC_DIR="${BASEPATH}/mindspore/lite/src/runtime/kernel/opencl/cl"
for sub_dir in "${CL_SRC_DIR}"/*
do
data_type="$(basename ${sub_dir})"
if [ ! -d ${CL_SRC_DIR}/${data_type} ]; then
continue
fi
cd ${CL_SRC_DIR}/${data_type}
rm -rf *.inc
echo "$(cd "$(dirname $0)"; pwd)"
for file_path in "${CL_SRC_DIR}/${data_type}"/*
do
file="$(basename ${file_path})"
inc_file=`echo ${CL_SRC_DIR}/${data_type}/${file} | sed 's/$/.inc/'`
sed 's/^/\"/;s/$/ \\n\" \\/' ${CL_SRC_DIR}/${data_type}/${file} > ${inc_file}
kernel_name=`echo ${file} | sed s'/.\{3\}$//'`
sed -i "1i\static const char *${kernel_name}_source_${data_type} =\"\\n\" \\" ${inc_file}
sed -i '$a\;' ${inc_file}
done
done
}
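To make the sed pipeline above concrete, here is an illustrative before/after (kernel name and body are made up): each OpenCL source is wrapped into a .inc holding a single C string literal that the runtime can embed directly.

# Hypothetical input cl/fp32/arithmetic.cl:
#   __kernel void ElementAdd(__global float *in, __global float *out) { ... }
# Generated cl/fp32/arithmetic.cl.inc (each line quoted, lines joined with '\',
# terminated by ';'):
#   static const char *arithmetic_source_fp32 ="\n" \
#   "__kernel void ElementAdd(__global float *in, __global float *out) { ... } \n" \
#   ;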
gene_ocl_program() {
CL_SRC_DIR="${BASEPATH}/mindspore/lite/src/runtime/kernel/opencl/cl"
SPIRV_DIR=build/spirv
rm -rf ${SPIRV_DIR}
mkdir -pv ${SPIRV_DIR}
for sub_dir in "${CL_SRC_DIR}"/*
do
data_type="$(basename ${sub_dir})"
if [ ! -d ${CL_SRC_DIR}/${data_type} ]; then
continue
fi
#echo $(cd "$(dirname $0)"; pwd)
for file_path in "${CL_SRC_DIR}/${data_type}"/*
do
file="$(basename ${file_path})"
if [ "${file##*.}" != "cl" ]; then
continue
fi
clang -Xclang -finclude-default-header -cl-std=CL2.0 --target=spir64-unknown-unknown -emit-llvm \
-c -O0 -o ${SPIRV_DIR}/${file%.*}.bc ${CL_SRC_DIR}/${data_type}/${file}
done
done
bcs=`ls ${SPIRV_DIR}/*.bc`
llvm-link ${bcs} -o ${SPIRV_DIR}/program.bc
llvm-spirv -o ${SPIRV_DIR}/program.spv ${SPIRV_DIR}/program.bc
CL_PROGRAM_PATH="${BASEPATH}/mindspore/lite/src/runtime/kernel/opencl/cl/program.inc"
echo "#include <vector>" > ${CL_PROGRAM_PATH}
echo "std::vector<unsigned char> g_program_binary = {" >> ${CL_PROGRAM_PATH}
#hexdump -v -e '16/1 "0x%02x, " "\n"' ${SPIRV_DIR}/program.spv >> ${CL_PROGRAM_PATH}
hexdump -v -e '1/1 "0x%02x, "' ${SPIRV_DIR}/program.spv >> ${CL_PROGRAM_PATH}
echo "};" >> ${CL_PROGRAM_PATH}
echo "Compile SPIRV done"
}
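The offline path instead links every compiled kernel into one SPIR-V module and embeds its raw bytes; the generated program.inc looks roughly like the sketch below (byte values are placeholders apart from the leading SPIR-V magic word 0x07230203):

# Sketch of cl/program.inc, consumed when OFFLINE_COMPILE / PROGRAM_WITH_IL is on:
#   #include <vector>
#   std::vector<unsigned char> g_program_binary = {
#   0x03, 0x02, 0x23, 0x07, /* ...rest of the hexdump of program.spv... */
#   };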
build_opencl() {
cd ${BASEPATH}
git submodule update --init third_party/OpenCL-Headers
git submodule update --init third_party/OpenCL-CLHPP
if [[ "${OPENCL_OFFLINE_COMPILE}" == "on" ]]; then
gene_ocl_program
else
gene_clhpp
fi
}
build_lite()
{
echo "start build mindspore lite project"
if [[ "${ENABLE_GPU}" == "on" ]]; then
build_opencl
fi
if [[ "${LITE_PLATFORM}" == "x86_64" ]]; then
build_protobuf
fi
build_flatbuffer
build_gtest
cd "${BASEPATH}/mindspore/lite"
mkdir -pv build
cd build
BUILD_TYPE="Release"
if [[ "${DEBUG_MODE}" == "on" ]]; then
BUILD_TYPE="Debug"
fi
if [[ "${LITE_PLATFORM}" == "arm64" ]]; then
checkndk
cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" -DANDROID_NATIVE_API_LEVEL="19" \
-DANDROID_NDK="${ANDROID_NDK}" -DANDROID_ABI="arm64-v8a" -DANDROID_TOOLCHAIN_NAME="aarch64-linux-android-clang" \
-DANDROID_STL="c++_shared" -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DSUPPORT_TRAIN=${SUPPORT_TRAIN} \
-DBUILD_DEVICE=on -DPLATFORM_ARM64=on -DBUILD_CONVERTER=off -DENABLE_NEON=on -DENABLE_FP16="off" \
-DSUPPORT_GPU=${ENABLE_GPU} -DOFFLINE_COMPILE=${OPENCL_OFFLINE_COMPILE} "${BASEPATH}/mindspore/lite"
elif [[ "${LITE_PLATFORM}" == "arm32" ]]; then
checkndk
cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" -DANDROID_NATIVE_API_LEVEL="19" \
-DANDROID_NDK="${ANDROID_NDK}" -DANDROID_ABI="armeabi-v7a" -DANDROID_TOOLCHAIN_NAME="clang" \
-DANDROID_STL="c++_shared" -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-DBUILD_DEVICE=on -DPLATFORM_ARM32=on -DENABLE_NEON=on -DSUPPORT_TRAIN=${SUPPORT_TRAIN} -DBUILD_CONVERTER=off \
-DSUPPORT_GPU=${ENABLE_GPU} -DOFFLINE_COMPILE=${OPENCL_OFFLINE_COMPILE} "${BASEPATH}/mindspore/lite"
else
cmake -DBUILD_DEVICE=on -DPLATFORM_ARM64=off -DBUILD_CONVERTER=${ENABLE_CONVERTER} -DSUPPORT_TRAIN=${SUPPORT_TRAIN} \
-DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DSUPPORT_GPU=${ENABLE_GPU} -DOFFLINE_COMPILE=${OPENCL_OFFLINE_COMPILE} "${BASEPATH}/mindspore/lite"
fi
VERBOSE=2 make -j$THREAD_NUM
COMPILE_RET=$?
if [[ "${COMPILE_RET}" -ne 0 ]]; then
echo "---------------- mindspore lite: build failed ----------------"
else
mkdir -pv ${BASEPATH}/mindspore/lite/output/
if [[ "$LITE_PLATFORM" == "x86_64" ]]; then
OUTPUT_DIR=${BASEPATH}/mindspore/lite/output/MSLite-0.5.0-linux_x86_64
rm -rf ${OUTPUT_DIR} && mkdir -p ${OUTPUT_DIR} && cd ${OUTPUT_DIR}
mkdir -p ${OUTPUT_DIR}/converter && mkdir -p ${OUTPUT_DIR}/time_profile
mkdir -p ${OUTPUT_DIR}/benchmark && mkdir -p ${OUTPUT_DIR}/include && mkdir -p ${OUTPUT_DIR}/lib
mkdir -p ${OUTPUT_DIR}/third_party
cp ${BASEPATH}/mindspore/lite/build/tools/converter/converter_lite ${OUTPUT_DIR}/converter/
cp ${BASEPATH}/mindspore/lite/build/tools/benchmark/benchmark ${OUTPUT_DIR}/benchmark/
cp ${BASEPATH}/mindspore/lite/include/*.h ${OUTPUT_DIR}/include/
mkdir -p ${OUTPUT_DIR}/include/ir/dtype/
cp ${BASEPATH}/mindspore/core/ir/dtype/type_id.h ${OUTPUT_DIR}/include/ir/dtype/
mkdir -p ${OUTPUT_DIR}/include/schema/
cp ${BASEPATH}/mindspore/lite/schema/*.h ${OUTPUT_DIR}/include/schema/
cp ${BASEPATH}/mindspore/lite/build/src/libmindspore-lite.so ${OUTPUT_DIR}/lib/
mkdir -p ${OUTPUT_DIR}/third_party/protobuf/lib
cp -r ${BASEPATH}/third_party/protobuf/build/include/ ${OUTPUT_DIR}/third_party/protobuf/
cp -r ${BASEPATH}/third_party/protobuf/build/lib/libprotobuf.so.19 ${OUTPUT_DIR}/third_party/protobuf/lib/
cp -r ${BASEPATH}/third_party/protobuf/build/lib/libprotobuf.so.19.0.0 ${OUTPUT_DIR}/third_party/protobuf/lib/
mkdir -p ${OUTPUT_DIR}/third_party/flatbuffers
cp -r ${BASEPATH}/third_party/flatbuffers/include/ ${OUTPUT_DIR}/third_party/flatbuffers/
cd ..
tar -cf MSLite-0.5.0-linux_x86_64.tar.gz MSLite-0.5.0-linux_x86_64/ --warning=no-file-changed
elif [[ "$LITE_PLATFORM" == "arm64" ]]; then
OUTPUT_DIR=${BASEPATH}/mindspore/lite/output/MSLite-0.5.0-linux_arm64
rm -rf ${OUTPUT_DIR} && mkdir -p ${OUTPUT_DIR} && cd ${OUTPUT_DIR}
mkdir -p ${OUTPUT_DIR}/time_profile && mkdir -p ${OUTPUT_DIR}/benchmark
mkdir -p ${OUTPUT_DIR}/include && mkdir -p ${OUTPUT_DIR}/lib
mkdir -p ${OUTPUT_DIR}/third_party
cp ${BASEPATH}/mindspore/lite/build/tools/benchmark/benchmark ${OUTPUT_DIR}/benchmark/
cp ${BASEPATH}/mindspore/lite/include/*.h ${OUTPUT_DIR}/include/
mkdir -p ${OUTPUT_DIR}/include/ir/dtype/
cp ${BASEPATH}/mindspore/core/ir/dtype/type_id.h ${OUTPUT_DIR}/include/ir/dtype/
mkdir -p ${OUTPUT_DIR}/include/schema/
cp ${BASEPATH}/mindspore/lite/schema/*.h ${OUTPUT_DIR}/include/schema/
cp ${BASEPATH}/mindspore/lite/build/src/libmindspore-lite.so ${OUTPUT_DIR}/lib/
mkdir -p ${OUTPUT_DIR}/third_party/flatbuffers
cp -r ${BASEPATH}/third_party/flatbuffers/include/ ${OUTPUT_DIR}/third_party/flatbuffers/
cd ..
tar -cf MSLite-0.5.0-linux_arm64.tar.gz MSLite-0.5.0-linux_arm64/ --warning=no-file-changed
elif [[ "$LITE_PLATFORM" == "arm32" ]]; then
OUTPUT_DIR=${BASEPATH}/mindspore/lite/output/MSLite-0.5.0-linux_arm32
rm -rf ${OUTPUT_DIR} && mkdir -p ${OUTPUT_DIR} && cd ${OUTPUT_DIR}
mkdir -p ${OUTPUT_DIR}/time_profile && mkdir -p ${OUTPUT_DIR}/benchmark
mkdir -p ${OUTPUT_DIR}/include && mkdir -p ${OUTPUT_DIR}/lib
mkdir -p ${OUTPUT_DIR}/third_party
cp ${BASEPATH}/mindspore/lite/build/tools/benchmark/benchmark ${OUTPUT_DIR}/benchmark/
cp ${BASEPATH}/mindspore/lite/include/*.h ${OUTPUT_DIR}/include/
mkdir -p ${OUTPUT_DIR}/include/ir/dtype/
cp ${BASEPATH}/mindspore/core/ir/dtype/type_id.h ${OUTPUT_DIR}/include/ir/dtype/
mkdir -p ${OUTPUT_DIR}/include/schema/
cp ${BASEPATH}/mindspore/lite/schema/*.h ${OUTPUT_DIR}/include/schema/
cp ${BASEPATH}/mindspore/lite/build/src/libmindspore-lite.so ${OUTPUT_DIR}/lib/
mkdir -p ${OUTPUT_DIR}/third_party/flatbuffers
cp -r ${BASEPATH}/third_party/flatbuffers/include/ ${OUTPUT_DIR}/third_party/flatbuffers/
cd ..
tar -cf MSLite-0.5.0-linux_arm32.tar.gz MSLite-0.5.0-linux_arm32/ --warning=no-file-changed
fi
echo "---------------- mindspore lite: build success ----------------"
fi
}
if [[ "X$COMPILE_LITE" = "Xon" ]]; then
build_lite
exit exit
else else
build_mindspore build_mindspore


@ -1,7 +1,3 @@
file(GLOB_RECURSE _IR_SRC_LIST ./*.cc dtype/*.cc) file(GLOB_RECURSE _IR_SRC_LIST ./*.cc dtype/*.cc)
file(GLOB_RECURSE _IR_LITE_SRC_FILES
./lite/tensor.cc
)
list(REMOVE_ITEM _IR_SRC_LIST ${_IR_LITE_SRC_FILES})
set_property(SOURCE ${_IR_SRC_LIST} PROPERTY COMPILE_DEFINITIONS SUBMODULE_ID=mindspore::SubModuleId::SM_IR) set_property(SOURCE ${_IR_SRC_LIST} PROPERTY COMPILE_DEFINITIONS SUBMODULE_ID=mindspore::SubModuleId::SM_IR)
add_library(_mindspore_ir_obj OBJECT ${_IR_SRC_LIST}) add_library(_mindspore_ir_obj OBJECT ${_IR_SRC_LIST})


@ -1,88 +0,0 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <vector>
#include <utility>
#include "ir/lite/tensor.h"
#include "securec/include/securec.h"
namespace mindspore {
namespace tensor {
#define kMaxMallocSize 1024 * 1024 * 100
Tensor::Tensor(const TypeId data_type, const std::vector<int> &shape) : MetaTensor(data_type, shape) {}
Tensor::Tensor(const TypePtr &type_ptr, const std::vector<int> &shape) : MetaTensor(type_ptr, shape) {}
Tensor::Tensor(const Tensor &tensor) : MetaTensor(tensor) {
this->data_type_ = tensor.data_type_;
this->shape_ = tensor.shape_;
auto ret = CopyTensorData(tensor);
if (0 != ret) {
MS_LOG(EXCEPTION) << "CopyTensorData error";
}
}
int Tensor::CopyTensorData(const Tensor &srcTensor) {
if (srcTensor.data_ == nullptr) {
MS_LOG(ERROR) << "data of srcTensor is nullptr";
return -1;
}
size_t data_size = this->Size();
MS_ASSERT(data_size == tensor.Size());
if (this->data_ == nullptr) {
if (data_size > kMaxMallocSize) {
MS_LOG(ERROR) << "Malloc size is too big while coping data, " << data_size << " bytes";
return -1;
}
this->data_ = malloc(data_size);
}
memcpy_s(this->data_, data_size, tensor.data_, tensor.Size());
return 0;
}
Tensor::~Tensor() {
if (nullptr != this->data_) {
free(this->data_);
}
}
Tensor &Tensor::operator=(const Tensor &tensor) {
if (&tensor == this) {
return *this;
}
this->shape_ = tensor.shape_;
this->data_type_ = tensor.data_type_;
auto ret = CopyTensorData(tensor);
if (0 != ret) {
MS_LOG(EXCEPTION) << "CopyTensorData error";
}
return *this;
}
bool Tensor::operator==(const Tensor &tensor) {
return data_ == tensor.data_ && shape_ == tensor.shape_ && data_type_ == tensor.data_type_;
}
bool Tensor::operator==(const Value &other) const {
if (other.isa<Tensor>()) {
auto other_ = static_cast<const Tensor &>(other);
return *this == other_;
} else {
return false;
}
}
} // namespace tensor
} // namespace mindspore


@ -1,61 +0,0 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CORE_IR_LITE_TENSOR_H_
#define MINDSPORE_CORE_IR_LITE_TENSOR_H_
#include <memory>
#include <vector>
#include "ir/meta_tensor.h"
#include "ir/dtype/type.h"
namespace mindspore {
namespace tensor {
class Tensor : public MetaTensor {
public:
Tensor() : MetaTensor() {}
Tensor(const TypeId data_type, const std::vector<int> &shape);
Tensor(const TypePtr &type_ptr, const std::vector<int> &shape);
Tensor(const Tensor &tensor);
~Tensor();
int CopyTensorData(const Tensor &srcTensor);
MS_DECLARE_PARENT(Tensor, MetaTensor)
virtual Tensor &operator=(const Tensor &tensor);
virtual bool operator==(const Tensor &tensor);
bool operator==(const Value &other) const override;
size_t Size() const { return MetaTensor::ElementsNum() * GetTypeByte(TypeIdToType(this->data_type_)); }
void *Data() const { return data_; }
protected:
void *data_;
};
using TensorPtr = std::shared_ptr<Tensor>;
} // namespace tensor
} // namespace mindspore
#endif // MINDSPORE_CORE_IR_LITE_TENSOR_H_


@ -75,8 +75,6 @@ int MetaTensor::ElementsNum() const {
return std::accumulate(shape_.begin(), shape_.end(), 1LL, std::multiplies<int>()); return std::accumulate(shape_.begin(), shape_.end(), 1LL, std::multiplies<int>());
} }
TypePtr MetaTensor::Dtype() const { return TypeIdToType(data_type_); }
TypePtr MetaTensor::SetDtype(const TypePtr type_ptr) { TypePtr MetaTensor::SetDtype(const TypePtr type_ptr) {
if (type_ptr == nullptr) { if (type_ptr == nullptr) {
MS_LOG(ERROR) << "Dtype to be set is nullptr."; MS_LOG(ERROR) << "Dtype to be set is nullptr.";


@ -37,5 +37,7 @@ abstract::AbstractBasePtr MetaTensor::ToAbstract() {
abs_tensor->set_value(shared_from_base<MetaTensor>()); abs_tensor->set_value(shared_from_base<MetaTensor>());
return abs_tensor; return abs_tensor;
} }
TypePtr MetaTensor::Dtype() const { return TypeIdToType(data_type_); }
} // namespace tensor } // namespace tensor
} // namespace mindspore } // namespace mindspore


@ -31,7 +31,7 @@ class ParamValue {
ParamValue(const ParamValue &other) = default; ParamValue(const ParamValue &other) = default;
~ParamValue() = default; virtual ~ParamValue() = default;
tensor::MetaTensorPtr value() const { return value_; } tensor::MetaTensorPtr value() const { return value_; }
void set_value(const tensor::MetaTensorPtr &value) { value_ = value; } void set_value(const tensor::MetaTensorPtr &value) { value_ = value; }


@ -17,11 +17,15 @@
#include "utils/log_adapter.h" #include "utils/log_adapter.h"
#include <unistd.h> #include <unistd.h>
#include <sys/time.h>
#include <map> #include <map>
#ifndef USE_ANDROID_LOG
#include "debug/trace.h" #include "debug/trace.h"
#endif
// namespace to support utils module definition // namespace to support utils module definition
namespace mindspore { namespace mindspore {
#ifndef USE_ANDROID_LOG
#ifdef USE_GLOG #ifdef USE_GLOG
static std::string GetTime() { static std::string GetTime() {
#define BUFLEN 80 #define BUFLEN 80
@ -125,6 +129,7 @@ static int GetSlogLevel(MsLogLevel level) {
} }
} }
#endif #endif
#endif
static std::string ExceptionTypeToString(ExceptionType type) { static std::string ExceptionTypeToString(ExceptionType type) {
#define _TO_STRING(x) #x #define _TO_STRING(x) #x
@ -184,7 +189,24 @@ static const char *GetSubModuleName(SubModuleId module_id) {
return sub_module_names[module_id % NUM_SUBMODUES]; return sub_module_names[module_id % NUM_SUBMODUES];
} }
const char *EnumStrForMsLogLevel(MsLogLevel level) {
if (level == DEBUG) {
return "DEBUG";
} else if (level == INFO) {
return "INFO";
} else if (level == WARNING) {
return "WARNING";
} else if (level == ERROR) {
return "ERROR";
} else if (level == EXCEPTION) {
return "EXCEPTION";
} else {
return "NO_LEVEL";
}
}
void LogWriter::OutputLog(const std::ostringstream &msg) const { void LogWriter::OutputLog(const std::ostringstream &msg) const {
#ifndef USE_ANDROID_LOG
#ifdef USE_GLOG #ifdef USE_GLOG
auto submodule_name = GetSubModuleName(submodule_); auto submodule_name = GetSubModuleName(submodule_);
google::LogMessage("", 0, GetGlogLevel(log_level_)).stream() google::LogMessage("", 0, GetGlogLevel(log_level_)).stream()
@ -197,6 +219,10 @@ void LogWriter::OutputLog(const std::ostringstream &msg) const {
Dlog(static_cast<int>(slog_module_id), GetSlogLevel(log_level_), "[%s:%d] %s] %s", location_.file_, location_.line_, Dlog(static_cast<int>(slog_module_id), GetSlogLevel(log_level_), "[%s:%d] %s] %s", location_.file_, location_.line_,
location_.func_, str_msg.c_str()); location_.func_, str_msg.c_str());
#endif #endif
#else
printf("%s [%s:%d] %s] %s\n:", EnumStrForMsLogLevel(log_level_), location_.file_, location_.line_, location_.func_,
msg.str().c_str());
#endif
} }
void LogWriter::operator<(const LogStream &stream) const noexcept { void LogWriter::operator<(const LogStream &stream) const noexcept {
@ -218,8 +244,10 @@ void LogWriter::operator^(const LogStream &stream) const {
} }
oss << msg.str(); oss << msg.str();
#ifndef USE_ANDROID_LOG
trace::TraceGraphEval(); trace::TraceGraphEval();
trace::GetEvalStackInfo(oss); trace::GetEvalStackInfo(oss);
#endif
if (exception_handler_ != nullptr) { if (exception_handler_ != nullptr) {
exception_handler_(exception_type_, oss.str()); exception_handler_(exception_type_, oss.str());


@ -25,11 +25,13 @@
#include <functional> #include <functional>
#include "utils/overload.h" #include "utils/overload.h"
#include "./securec.h" #include "./securec.h"
#ifndef USE_ANDROID_LOG
#ifdef USE_GLOG #ifdef USE_GLOG
#include "glog/logging.h" #include "glog/logging.h"
#else #else
#include "toolchain/slog.h" #include "toolchain/slog.h"
#endif #endif
#endif
// NOTICE: when relative path of 'log_adapter.h' changed, macro 'LOG_HDR_FILE_REL_PATH' must be changed // NOTICE: when relative path of 'log_adapter.h' changed, macro 'LOG_HDR_FILE_REL_PATH' must be changed
#define LOG_HDR_FILE_REL_PATH "mindspore/core/utils/log_adapter.h" #define LOG_HDR_FILE_REL_PATH "mindspore/core/utils/log_adapter.h"
@ -129,6 +131,8 @@ enum SubModuleId : int {
#define SUBMODULE_ID mindspore::SubModuleId::SM_ME #define SUBMODULE_ID mindspore::SubModuleId::SM_ME
#endif #endif
const char *EnumStrForMsLogLevel(MsLogLevel level);
#if defined(_WIN32) || defined(_WIN64) #if defined(_WIN32) || defined(_WIN64)
extern int g_ms_submodule_log_levels[] __attribute__((dllexport)); extern int g_ms_submodule_log_levels[] __attribute__((dllexport));
#else #else


@ -0,0 +1,119 @@
cmake_minimum_required(VERSION 3.14)
project (Lite)
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.3.0)
message(FATAL_ERROR "GCC vesion ${CMAKE_CXX_COMPILER_VERSION} must not be less than 7.3.0")
endif ()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17")
set(TOP_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../..)
set(CORE_DIR ${TOP_DIR}/mindspore/core)
set(CCSRC_DIR ${TOP_DIR}/mindspore/ccsrc)
include_directories(${TOP_DIR})
include_directories(${CORE_DIR})
include_directories(${CCSRC_DIR})
include_directories(${CMAKE_CURRENT_SOURCE_DIR})
include_directories(${TOP_DIR}/third_party)
include_directories(${TOP_DIR}/third_party/flatbuffers/include)
include(${TOP_DIR}/cmake/utils.cmake)
include(${TOP_DIR}/cmake/external_libs/json.cmake)
include(${TOP_DIR}/cmake/dependency_securec.cmake)
set(CMAKE_VERBOSE_MAKEFILE on)
add_compile_definitions(USE_ANDROID_LOG)
add_compile_definitions(NO_DLIB)
add_compile_options(-fPIC)
option(BUILD_DEVICE "if build device" on)
option(SUPPORT_TRAIN "if build for on-device train" off)
option(PLATFORM_ARM64 "if build device for arm64" off)
option(PLATFORM_ARM32 "if build device for arm32" off)
option(BUILD_CONVERTER "if build converter" on)
option(ENABLE_FP16 "if build fp16 ops" off)
option(SUPPORT_GPU "if support gpu" off)
option(OFFLINE_COMPILE "if offline compile OpenCL kernel" off)
if (BUILD_DEVICE)
add_compile_definitions(BUILD_DEVICE)
endif()
if (SUPPORT_TRAIN)
add_compile_definitions(SUPPORT_TRAIN)
endif()
if (ENABLE_NEON)
add_compile_definitions(ENABLE_NEON)
endif ()
if (ENABLE_FP16)
add_compile_definitions(ENABLE_FP16)
endif ()
if (SUPPORT_GPU)
add_definitions(-DUSE_OPENCL_WRAPPER)
add_definitions(-DMS_OPENCL_PROFILE=false)
add_definitions(-DCL_HPP_TARGET_OPENCL_VERSION=200)
add_compile_definitions(SUPPORT_GPU)
if(OFFLINE_COMPILE)
add_compile_definitions(PROGRAM_WITH_IL)
endif()
include_directories(${TOP_DIR}/third_party/OpenCL-Headers)
include_directories(${TOP_DIR}/third_party/OpenCL-CLHPP/include)
endif()
set(ANF_SRC
${CMAKE_CURRENT_SOURCE_DIR}/../core/ir/meta_tensor.cc
${CCSRC_DIR}/gvar/logging_level.cc
${CCSRC_DIR}/gvar/typeid_manager.cc
${CMAKE_CURRENT_SOURCE_DIR}/../core/base/base.cc
${CMAKE_CURRENT_SOURCE_DIR}/../core/utils/log_adapter.cc
)
if (BUILD_CONVERTER)
if (PLATFORM_ARM64 OR PLATFORM_ARM32)
MESSAGE(FATAL_ERROR "Cannot build converter in arm platform")
endif()
find_package(Python3 3.7 COMPONENTS Interpreter Development)
if(Python3_FOUND)
set(PYTHON_INCLUDE_DIRS "${Python3_INCLUDE_DIRS}")
set(PYTHON_LIBRARIES "${Python3_LIBRARIES}")
if (WIN32)
if (Python3_DIR)
message("Python3_DIR set already: " ${Python3_DIR})
else()
string(LENGTH ${PYTHON_LIBRARIES} PYTHON_LIBRARIES_LEN)
string(LENGTH "libpythonxx.a" Python3_NAME_LEN)
math(EXPR Python3_DIR_LEN ${PYTHON_LIBRARIES_LEN}-${Python3_NAME_LEN})
string(SUBSTRING ${Python3_LIBRARIES} 0 ${Python3_DIR_LEN} Python3_DIR)
message("Python3_DIR: " ${Python3_DIR})
endif()
link_directories(${Python3_DIR})
endif()
else()
find_python_package(py_inc py_lib)
set(PYTHON_INCLUDE_DIRS "${py_inc}")
set(PYTHON_LIBRARIES "${py_lib}")
endif()
include_directories(${PYTHON_INCLUDE_DIRS})
include(${TOP_DIR}/cmake/external_libs/pybind11.cmake)
include(${TOP_DIR}/cmake/external_libs/eigen.cmake)
include_directories(${TOP_DIR}/third_party/protobuf/build/include)
link_directories(${TOP_DIR}/third_party/protobuf/build/lib)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/tools/converter)
add_subdirectory(src/common/anf_exporter)
endif()
if (BUILD_DEVICE)
if (PLATFORM_ARM32 OR PLATFORM_ARM64)
if (NOT DEFINED ENV{ANDROID_NDK})
message(FATAL_ERROR "env ANDROID_NDK should be setted for ARM compile")
endif()
add_compile_definitions(ENABLE_ARM)
endif()
if (PLATFORM_ARM32)
add_definitions(-mfloat-abi=softfp -mfpu=neon)
add_compile_definitions(ENABLE_ARM32)
endif()
if (PLATFORM_ARM64)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv8.2-a+dotprod+fp16")
add_compile_definitions(ENABLE_ARM64)
endif()
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/src)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/tools/benchmark)
# add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/test)
endif()

272
mindspore/lite/build.sh Executable file

@ -0,0 +1,272 @@
#!/usr/bin/env bash
set -e
CUR_DIR=$(cd "$(dirname $0)"; pwd)
BASE_DIR=${CUR_DIR}/../../
usage()
{
echo "Usage:"
echo "bash build.sh [-d] [-a arm64|arm32] [-j[n]] [-m] [-f] [-g] [-c] [-s] [-o]"
echo ""
echo "Options:"
echo " -d Enable Debug"
echo " -c Enable compile converter, default off"
echo " -m Enable Incremental compilation"
echo " -a Select ARM platform, default off"
echo " -j[n] Set the threads when building, default: -j8"
echo " -f Compile fp16 ops"
echo " -g Enable gpu compile"
echo " -s Support train"
echo " -o Offline compile OpenCL kernel"
}
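A few hypothetical invocations of this standalone script, using the option letters listed above (the arm builds additionally expect ANDROID_NDK in the environment):

bash mindspore/lite/build.sh -c -j8            # host x86_64 build including the converter
bash mindspore/lite/build.sh -a arm64 -g -j16  # arm64 device build with GPU (OpenCL) support
bash mindspore/lite/build.sh -m                # incremental rebuild of the previous configuration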
checkopts()
{
# Init default values of build options
THREAD_NUM="8"
BUILD_TYPE="Release"
BUILD_DEVICE_PLATFORM="off"
MAKE_ONLY="off"
ENABLE_FP16="off"
ENABLE_GPU="off"
ENABLE_CONVERTER="off"
SUPPORT_TRAIN="off"
OFFLINE_COMPILE="off"
# Process the options
while getopts 'j:da:mfcsgo' opt
do
OPTARG=$(echo ${OPTARG} | tr '[A-Z]' '[a-z]')
case "${opt}" in
m)
MAKE_ONLY="on"
echo "Incremental compilation"
;;
d)
BUILD_TYPE="Debug"
echo "Build Debug version"
;;
j)
THREAD_NUM=$OPTARG
;;
a)
if [[ "X$OPTARG" == "Xarm64" ]]; then
BUILD_DEVICE_PLATFORM="arm64"
echo "Enable arm64"
elif [[ "X$OPTARG" == "Xarm32" ]]; then
BUILD_DEVICE_PLATFORM="arm32"
echo "Enable arm32"
else
echo "-I parameter must be arm64 or arm32"
exit 1
fi
;;
c)
ENABLE_CONVERTER="on"
echo "Enable converter"
;;
s)
SUPPORT_TRAIN="on"
echo "Support train"
;;
f)
ENABLE_FP16="on"
echo "Enable fp16"
;;
g)
ENABLE_GPU="on"
echo "Enable gpu"
;;
o)
OFFLINE_COMPILE="on"
echo "OpenCL kernel offline compile"
;;
*)
echo "Unknown option ${opt}!"
usage
exit 1
esac
done
}
checkndk() {
if [ "${ANDROID_NDK}" ]; then
echo -e "\e[31mANDROID_NDK_PATH=$ANDROID_NDK \e[0m"
else
echo -e "\e[31mplease set ANDROID_NDK_PATH in environment variable for example: export ANDROID_NDK=/root/usr/android-ndk-r16b/ \e[0m"
exit 1
fi
}
gene_flatbuffer() {
FLAT_DIR="${BASE_DIR}/mindspore/lite/schema"
cd ${FLAT_DIR} && rm -rf "${FLAT_DIR}/inner" && mkdir -p "${FLAT_DIR}/inner"
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b --reflect-types --gen-mutable --reflect-names --gen-object-api -o "${FLAT_DIR}/inner"
FLAT_DIR="${BASE_DIR}/mindspore/lite/tools/converter/parser/tflite"
cd ${FLAT_DIR}
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b --reflect-types --gen-mutable --reflect-names --gen-object-api -o "${FLAT_DIR}/"
}
build_flatbuffer() {
cd ${BASE_DIR}
FLATC="${BASE_DIR}"/third_party/flatbuffers/build/flatc
if [[ ! -f "${FLATC}" ]]; then
git submodule update --init --recursive third_party/flatbuffers
cd ${BASE_DIR}/third_party/flatbuffers
rm -rf build && mkdir -pv build && cd build && cmake .. && make -j$THREAD_NUM
gene_flatbuffer
fi
if [[ "${MAKE_ONLY}" == "off" ]]; then
gene_flatbuffer
fi
}
gene_protobuf() {
PROTO_SRC_DIR="${BASE_DIR}/mindspore/lite/tools/converter/parser/caffe"
find ${PROTO_SRC_DIR} -name "*.proto" -print0 | xargs -0 "${PROTOC}" -I"${PROTO_SRC_DIR}" --cpp_out="${PROTO_SRC_DIR}"
PROTO_SRC_DIR="${BASE_DIR}/mindspore/lite/tools/converter/parser/onnx"
find ${PROTO_SRC_DIR} -name "*.proto" -print0 | xargs -0 "${PROTOC}" -I"${PROTO_SRC_DIR}" --cpp_out="${PROTO_SRC_DIR}"
}
build_protobuf() {
cd ${BASE_DIR}
PROTOC="${BASE_DIR}"/third_party/protobuf/build/bin/protoc
if [[ ! -f "${PROTOC}" ]]; then
git submodule update --init --recursive third_party/protobuf
cd ${BASE_DIR}/third_party/protobuf
rm -rf build && mkdir -pv build && ./autogen.sh
./configure --prefix=${BASE_DIR}/third_party/protobuf/build
make clean && make -j$THREAD_NUM && make install
gene_protobuf
fi
if [[ "${MAKE_ONLY}" == "off" ]]; then
gene_protobuf
fi
}
build_gtest() {
cd ${BASE_DIR}
git submodule update --init --recursive third_party/googletest
}
gene_clhpp() {
CL_SRC_DIR="${BASE_DIR}/mindspore/lite/src/runtime/kernel/opencl/cl"
for sub_dir in "${CL_SRC_DIR}"/*
do
data_type="$(basename ${sub_dir})"
if [ ! -d ${CL_SRC_DIR}/${data_type} ]; then
continue
fi
cd ${CL_SRC_DIR}/${data_type}
rm -rf *.inc
echo "$(cd "$(dirname $0)"; pwd)"
for file_path in "${CL_SRC_DIR}/${data_type}"/*
do
file="$(basename ${file_path})"
inc_file=`echo ${CL_SRC_DIR}/${data_type}/${file} | sed 's/$/.inc/'`
sed 's/^/\"/;s/$/ \\n\" \\/' ${CL_SRC_DIR}/${data_type}/${file} > ${inc_file}
kernel_name=`echo ${file} | sed s'/.\{3\}$//'`
sed -i "1i\static const char *${kernel_name}_source_${data_type} =\"\\n\" \\" ${inc_file}
sed -i '$a\;' ${inc_file}
done
done
}
gene_ocl_program() {
CL_SRC_DIR="${BASE_DIR}/mindspore/lite/src/runtime/kernel/opencl/cl"
SPIRV_DIR=build/spirv
rm -rf ${SPIRV_DIR}
mkdir -pv ${SPIRV_DIR}
for sub_dir in "${CL_SRC_DIR}"/*
do
data_type="$(basename ${sub_dir})"
if [ ! -d ${CL_SRC_DIR}/${data_type} ]; then
continue
fi
#echo $(cd "$(dirname $0)"; pwd)
for file_path in "${CL_SRC_DIR}/${data_type}"/*
do
file="$(basename ${file_path})"
if [ "${file##*.}" != "cl" ]; then
continue
fi
clang -Xclang -finclude-default-header -cl-std=CL2.0 --target=spir64-unknown-unknown -emit-llvm \
-c -O0 -o ${SPIRV_DIR}/${file%.*}.bc ${CL_SRC_DIR}/${data_type}/${file}
done
done
bcs=`ls ${SPIRV_DIR}/*.bc`
llvm-link ${bcs} -o ${SPIRV_DIR}/program.bc
llvm-spirv -o ${SPIRV_DIR}/program.spv ${SPIRV_DIR}/program.bc
CL_PROGRAM_PATH="${BASE_DIR}/mindspore/lite/src/runtime/kernel/opencl/cl/program.inc"
echo "#include <vector>" > ${CL_PROGRAM_PATH}
echo "std::vector<unsigned char> g_program_binary = {" >> ${CL_PROGRAM_PATH}
#hexdump -v -e '16/1 "0x%02x, " "\n"' ${SPIRV_DIR}/program.spv >> ${CL_PROGRAM_PATH}
hexdump -v -e '1/1 "0x%02x, "' ${SPIRV_DIR}/program.spv >> ${CL_PROGRAM_PATH}
echo "};" >> ${CL_PROGRAM_PATH}
echo "Compile SPIRV done"
}
build_opencl() {
cd ${BASE_DIR}
git submodule update --init third_party/OpenCL-Headers
git submodule update --init third_party/OpenCL-CLHPP
if [[ "${OFFLINE_COMPILE}" == "on" ]]; then
gene_ocl_program
else
gene_clhpp
fi
}
buildlite() {
if [[ "${MAKE_ONLY}" == "off" ]]; then
cd ${CUR_DIR}
rm -rf build
mkdir -pv build
cd build
if [[ "${BUILD_DEVICE_PLATFORM}" == "arm64" ]]; then
checkndk
cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" -DANDROID_NATIVE_API_LEVEL="19" \
-DANDROID_NDK="${ANDROID_NDK}" -DANDROID_ABI="arm64-v8a" -DANDROID_TOOLCHAIN_NAME="aarch64-linux-android-clang" \
-DANDROID_STL="c++_shared" -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DSUPPORT_TRAIN=${SUPPORT_TRAIN} \
-DBUILD_DEVICE=on -DPLATFORM_ARM64=on -DBUILD_CONVERTER=off -DENABLE_NEON=on -DENABLE_FP16="${ENABLE_FP16}" \
-DSUPPORT_GPU=${ENABLE_GPU} -DOFFLINE_COMPILE=${OFFLINE_COMPILE} ..
elif [[ "${BUILD_DEVICE_PLATFORM}" == "arm32" ]]; then
checkndk
cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" -DANDROID_NATIVE_API_LEVEL="19" \
-DANDROID_NDK="${ANDROID_NDK}" -DANDROID_ABI="armeabi-v7a" -DANDROID_TOOLCHAIN_NAME="clang" \
-DANDROID_STL="c++_shared" -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-DBUILD_DEVICE=on -DPLATFORM_ARM32=on -DENABLE_NEON=on -DSUPPORT_TRAIN=${SUPPORT_TRAIN} -DBUILD_CONVERTER=off \
-DSUPPORT_GPU=${ENABLE_GPU} -DOFFLINE_COMPILE=${OFFLINE_COMPILE} ..
else
cmake -DBUILD_DEVICE=on -DPLATFORM_ARM64=off -DBUILD_CONVERTER=${ENABLE_CONVERTER} -DSUPPORT_TRAIN=${SUPPORT_TRAIN} \
-DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DSUPPORT_GPU=${ENABLE_GPU} -DOFFLINE_COMPILE=${OFFLINE_COMPILE} ..
fi
else
cd ${CUR_DIR}/build
fi
VERBOSE=2 make -j$THREAD_NUM
}
echo "---------------- mindspore lite: build start ----------------"
checkopts "$@"
build_flatbuffer
if [[ "${ENABLE_CONVERTER}" == "on" ]]; then
build_protobuf
fi
if [[ "${ENABLE_GPU}" == "on" ]]; then
build_opencl
fi
build_gtest
buildlite
COMPILE_RET=$?
if [[ "${COMPILE_RET}" -ne 0 ]]; then
echo "---------------- mindspore lite: build failed ----------------"
else
echo "---------------- mindspore lite: build success ----------------"
fi

File diff suppressed because it is too large


@ -0,0 +1,7 @@
# CMake generated Testfile for
# Source directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest
# Build directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest
#
# This file includes the relevant testing commands required for
# testing this directory and lists subdirectories to be tested as well.
subdirs("googlemock")


@ -0,0 +1,7 @@
# CMake generated Testfile for
# Source directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock
# Build directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock
#
# This file includes the relevant testing commands required for
# testing this directory and lists subdirectories to be tested as well.
subdirs("gtest")


@ -0,0 +1,517 @@
<?xml version="1.0" encoding="UTF-8"?>
<CodeBlocks_project_file>
<FileVersion major="1" minor="6"/>
<Project>
<Option title="gmock"/>
<Option makefile_is_custom="1"/>
<Option compiler="gcc"/>
<Option virtualFolders="CMake Files\;CMake Files\..\;CMake Files\..\..\;CMake Files\..\..\cmake\;CMake Files\..\..\cmake\external_libs\;CMake Files\..\..\..\;CMake Files\..\..\..\..\;CMake Files\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\home\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\;CMake Files\..\..\third_party\;CMake Files\..\..\third_party\securec\;CMake Files\..\..\third_party\securec\src\;CMake Files\..\..\third_party\googletest\;CMake Files\..\..\third_party\googletest\googlemock\;CMake Files\..\..\third_party\googletest\googlemock\cmake\;CMake Files\..\..\third_party\googletest\googletest\;CMake Files\..\..\third_party\googletest\googletest\cmake\;CMake Files\tools\;CMake Files\tools\converter\;CMake Files\tools\converter\parser\;CMake Files\tools\converter\parser\caffe\;CMake Files\tools\converter\parser\tflite\;CMake Files\tools\converter\optimizer\;CMake Files\tools\converter\optimizer\fusion\;CMake Files\tools\converter\optimizer\node\;CMake Files\tools\converter\optimizer\graph\;CMake Files\tools\converter\quantizer\;CMake Files\tools\benchmark\;CMake Files\src\;CMake Files\src\common\;CMake Files\src\common\anf_exporter\;CMake Files\src\runtime\;CMake Files\src\runtime\kernel\;CMake Files\src\runtime\kernel\arm\;CMake Files\src\runtime\kernel\arm\opclib\;CMake Files\src\ops\;CMake Files\test\;"/>
<Build>
<Target title="all">
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock"/>
<Option type="4"/>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 all"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock_main">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/libgmock_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock_main"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock_main/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/libgmock_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock_main/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/libgmockd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/libgmockd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
</Build>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/src/gmock-all.cc">
<Option target="gmock"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/src/gmock_main.cc">
<Option target="gmock_main"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest-all.cc">
<Option target="gtest"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest_main.cc">
<Option target="gtest_main"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/CMakeLists.txt">
<Option virtualFolder="CMake Files\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_gtest.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_securec.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/utils.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/eigen.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/json.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/pybind11.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/FindPythonLibsNew.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Tools.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/Config.cmake.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/internal_utils.cmake">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/caffe/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\caffe\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/tflite/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\tflite\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/fusion/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\fusion\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/node/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\node\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/graph/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\graph\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/quantizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\quantizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/benchmark/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\benchmark\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/common/anf_exporter/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\common\anf_exporter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/opclib/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\opclib\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/ops/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\ops\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/test/CMakeLists.txt">
<Option virtualFolder="CMake Files\test\"/>
</Unit>
</Project>
</CodeBlocks_project_file>


@ -0,0 +1,6 @@
# CMake generated Testfile for
# Source directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest
# Build directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest
#
# This file includes the relevant testing commands required for
# testing this directory and lists subdirectories to be tested as well.
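The Testfile above contains only its header because no tests are registered for that directory. Purely as an illustration (the test name and paths below are hypothetical), a directory that does call add_test() in its CMakeLists.txt would have CMake append entries of roughly this form:

# Hypothetical entries CMake appends when tests are registered in this directory:
add_test(gtest_sample_test "/path/to/build/gtest_sample_test" "--gtest_color=no")
set_tests_properties(gtest_sample_test PROPERTIES TIMEOUT "600")
subdirs("gtest")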


@ -0,0 +1,33 @@
####### Expanded from @PACKAGE_INIT@ by configure_package_config_file() #######
####### Any changes to this file will be overwritten by the next CMake run ####
####### The input file was Config.cmake.in ########
get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/../../../" ABSOLUTE)
macro(set_and_check _var _file)
set(${_var} "${_file}")
if(NOT EXISTS "${_file}")
message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !")
endif()
endmacro()
macro(check_required_components _NAME)
foreach(comp ${${_NAME}_FIND_COMPONENTS})
if(NOT ${_NAME}_${comp}_FOUND)
if(${_NAME}_FIND_REQUIRED_${comp})
set(${_NAME}_FOUND FALSE)
endif()
endif()
endforeach()
endmacro()
####################################################################################
include(CMakeFindDependencyMacro)
if (ON)
set(THREADS_PREFER_PTHREAD_FLAG ON)
find_dependency(Threads)
endif()
include("${CMAKE_CURRENT_LIST_DIR}/GTestTargets.cmake")
check_required_components("")
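This expanded GTestConfig.cmake is what a downstream find_package(GTest CONFIG) call loads. A minimal consumer sketch, assuming the GTest::gtest / GTest::gtest_main imported targets that the included GTestTargets.cmake normally defines (project and file names here are illustrative):

# Minimal consumer sketch; assumes GTestTargets.cmake exports GTest::gtest_main.
cmake_minimum_required(VERSION 3.14)
project(gtest_consumer_demo CXX)

# Loads the GTestConfig.cmake shown above (which in turn includes GTestTargets.cmake).
find_package(GTest 1.9 CONFIG REQUIRED)

add_executable(demo_test demo_test.cc)
target_link_libraries(demo_test PRIVATE GTest::gtest_main)

enable_testing()
add_test(NAME demo_test COMMAND demo_test)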


@ -0,0 +1,37 @@
# This is a basic version file for the Config-mode of find_package().
# It is used by write_basic_package_version_file() as input file for configure_file()
# to create a version-file which can be installed along a config.cmake file.
#
# The created file sets PACKAGE_VERSION_EXACT if the current version string and
# the requested version string are exactly the same and it sets
# PACKAGE_VERSION_COMPATIBLE if the current version is >= requested version.
# The variable CVF_VERSION must be set before calling configure_file().
set(PACKAGE_VERSION "1.9.0")
if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
else()
set(PACKAGE_VERSION_COMPATIBLE TRUE)
if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION)
set(PACKAGE_VERSION_EXACT TRUE)
endif()
endif()
# if the installed project requested no architecture check, don't perform the check
if("FALSE")
return()
endif()
# if the installed or the using project don't have CMAKE_SIZEOF_VOID_P set, ignore it:
if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "" OR "8" STREQUAL "")
return()
endif()
# check that the installed version has the same 32/64bit-ness as the one which is currently searching:
if(NOT CMAKE_SIZEOF_VOID_P STREQUAL "8")
math(EXPR installedBits "8 * 8")
set(PACKAGE_VERSION "${PACKAGE_VERSION} (${installedBits}bit)")
set(PACKAGE_VERSION_UNSUITABLE TRUE)
endif()
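The version file is evaluated by find_package() before the config file itself is loaded; it implements an "any newer installed version is compatible" policy plus a 32/64-bit pointer-size check. A short sketch of how hypothetical version requests would resolve against PACKAGE_VERSION 1.9.0:

# Requested versions are compared against PACKAGE_VERSION (1.9.0):
find_package(GTest 1.8 CONFIG)          # 1.9.0 >= 1.8 -> compatible, package accepted
find_package(GTest 1.9.0 EXACT CONFIG)  # string-equal -> compatible and exact
find_package(GTest 2.0 CONFIG)          # 1.9.0 < 2.0  -> incompatible, package rejected
# A 32-bit consumer would additionally fail the CMAKE_SIZEOF_VOID_P check and see
# the package marked unsuitable even when the version number matches.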


@ -0,0 +1,9 @@
libdir=/usr/local/lib
includedir=/usr/local/include
Name: gmock
Description: GoogleMock (without main() function)
Version: 1.9.0
URL: https://github.com/google/googletest
Libs: -L${libdir} -lgmock -pthread
Cflags: -I${includedir} -DGTEST_HAS_PTHREAD=1 -pthread


@ -0,0 +1,9 @@
libdir=/usr/local/lib
includedir=/usr/local/include
Name: gmock_main
Description: GoogleMock (with main() function)
Version: 1.9.0
URL: https://github.com/google/googletest
Libs: -L${libdir} -lgmock_main -pthread
Cflags: -I${includedir} -DGTEST_HAS_PTHREAD=1 -pthread


@ -0,0 +1,9 @@
libdir=/usr/local/lib
includedir=/usr/local/include
Name: gtest
Description: GoogleTest (without main() function)
Version: 1.9.0
URL: https://github.com/google/googletest
Libs: -L${libdir} -lgtest -pthread
Cflags: -I${includedir} -DGTEST_HAS_PTHREAD=1 -pthread


@ -0,0 +1,10 @@
libdir=/usr/local/lib
includedir=/usr/local/include
Name: gtest_main
Description: GoogleTest (with main() function)
Version: 1.9.0
URL: https://github.com/google/googletest
Requires: gtest
Libs: -L${libdir} -lgtest_main -pthread
Cflags: -I${includedir} -DGTEST_HAS_PTHREAD=1 -pthread
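The four generated .pc files expose the same libraries to non-CMake builds through pkg-config; the equivalent shell query is pkg-config --cflags --libs gtest_main. A hedged sketch of consuming gtest_main from CMake via the standard FindPkgConfig module (the demo target name is illustrative):

# Resolve the gtest_main.pc shown above through pkg-config.
find_package(PkgConfig REQUIRED)
pkg_check_modules(GTEST_MAIN REQUIRED IMPORTED_TARGET gtest_main)

add_executable(pc_demo_test pc_demo_test.cc)
target_link_libraries(pc_demo_test PRIVATE PkgConfig::GTEST_MAIN)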


@ -0,0 +1,327 @@
<?xml version="1.0" encoding="UTF-8"?>
<CodeBlocks_project_file>
<FileVersion major="1" minor="6"/>
<Project>
<Option title="gtest"/>
<Option makefile_is_custom="1"/>
<Option compiler="gcc"/>
<Option virtualFolders="CMake Files\;CMake Files\..\;CMake Files\..\..\;CMake Files\..\..\cmake\;CMake Files\..\..\cmake\external_libs\;CMake Files\..\..\..\;CMake Files\..\..\..\..\;CMake Files\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\home\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\;CMake Files\..\..\third_party\;CMake Files\..\..\third_party\securec\;CMake Files\..\..\third_party\securec\src\;CMake Files\..\..\third_party\googletest\;CMake Files\..\..\third_party\googletest\googlemock\;CMake Files\..\..\third_party\googletest\googlemock\cmake\;CMake Files\..\..\third_party\googletest\googletest\;CMake Files\..\..\third_party\googletest\googletest\cmake\;CMake Files\tools\;CMake Files\tools\converter\;CMake Files\tools\converter\parser\;CMake Files\tools\converter\parser\caffe\;CMake Files\tools\converter\parser\tflite\;CMake Files\tools\converter\optimizer\;CMake Files\tools\converter\optimizer\fusion\;CMake Files\tools\converter\optimizer\node\;CMake Files\tools\converter\optimizer\graph\;CMake Files\tools\converter\quantizer\;CMake Files\tools\benchmark\;CMake Files\src\;CMake Files\src\common\;CMake Files\src\common\anf_exporter\;CMake Files\src\runtime\;CMake Files\src\runtime\kernel\;CMake Files\src\runtime\kernel\arm\;CMake Files\src\runtime\kernel\arm\opclib\;CMake Files\src\ops\;CMake Files\test\;"/>
<Build>
<Target title="all">
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option type="4"/>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 all"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
</Build>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest-all.cc">
<Option target="gtest"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest_main.cc">
<Option target="gtest_main"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/CMakeLists.txt">
<Option virtualFolder="CMake Files\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_gtest.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_securec.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/utils.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/eigen.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/json.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/pybind11.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/FindPythonLibsNew.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Tools.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/Config.cmake.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/internal_utils.cmake">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/caffe/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\caffe\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/tflite/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\tflite\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/fusion/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\fusion\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/node/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\node\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/graph/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\graph\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/quantizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\quantizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/benchmark/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\benchmark\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/common/anf_exporter/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\common\anf_exporter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/opclib/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\opclib\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/ops/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\ops\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/test/CMakeLists.txt">
<Option virtualFolder="CMake Files\test\"/>
</Unit>
</Project>
</CodeBlocks_project_file>

View File

@ -0,0 +1,517 @@
<?xml version="1.0" encoding="UTF-8"?>
<CodeBlocks_project_file>
<FileVersion major="1" minor="6"/>
<Project>
<Option title="googletest-distribution"/>
<Option makefile_is_custom="1"/>
<Option compiler="gcc"/>
<Option virtualFolders="CMake Files\;CMake Files\..\;CMake Files\..\..\;CMake Files\..\..\cmake\;CMake Files\..\..\cmake\external_libs\;CMake Files\..\..\..\;CMake Files\..\..\..\..\;CMake Files\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\home\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\;CMake Files\..\..\third_party\;CMake Files\..\..\third_party\securec\;CMake Files\..\..\third_party\securec\src\;CMake Files\..\..\third_party\googletest\;CMake Files\..\..\third_party\googletest\googlemock\;CMake Files\..\..\third_party\googletest\googlemock\cmake\;CMake Files\..\..\third_party\googletest\googletest\;CMake Files\..\..\third_party\googletest\googletest\cmake\;CMake Files\tools\;CMake Files\tools\converter\;CMake Files\tools\converter\parser\;CMake Files\tools\converter\parser\caffe\;CMake Files\tools\converter\parser\tflite\;CMake Files\tools\converter\optimizer\;CMake Files\tools\converter\optimizer\fusion\;CMake Files\tools\converter\optimizer\node\;CMake Files\tools\converter\optimizer\graph\;CMake Files\tools\converter\quantizer\;CMake Files\tools\benchmark\;CMake Files\src\;CMake Files\src\common\;CMake Files\src\common\anf_exporter\;CMake Files\src\runtime\;CMake Files\src\runtime\kernel\;CMake Files\src\runtime\kernel\arm\;CMake Files\src\runtime\kernel\arm\opclib\;CMake Files\src\ops\;CMake Files\test\;"/>
<Build>
<Target title="all">
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest"/>
<Option type="4"/>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/Makefile&quot; VERBOSE=1 all"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock_main">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/libgmock_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock_main"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock_main/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/libgmock_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock_main/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/libgmockd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/libgmockd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
</Build>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/src/gmock-all.cc">
<Option target="gmock"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/src/gmock_main.cc">
<Option target="gmock_main"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest-all.cc">
<Option target="gtest"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest_main.cc">
<Option target="gtest_main"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/CMakeLists.txt">
<Option virtualFolder="CMake Files\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_gtest.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_securec.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/utils.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/eigen.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/json.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/pybind11.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/FindPythonLibsNew.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Tools.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/Config.cmake.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/internal_utils.cmake">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/caffe/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\caffe\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/tflite/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\tflite\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/fusion/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\fusion\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/node/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\node\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/graph/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\graph\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/quantizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\quantizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/benchmark/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\benchmark\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/common/anf_exporter/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\common\anf_exporter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/opclib/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\opclib\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/ops/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\ops\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/test/CMakeLists.txt">
<Option virtualFolder="CMake Files\test\"/>
</Unit>
</Project>
</CodeBlocks_project_file>

View File

@ -0,0 +1,112 @@
<?xml version="1.0" encoding="UTF-8"?>
<CodeBlocks_project_file>
<FileVersion major="1" minor="6"/>
<Project>
<Option title="optimize"/>
<Option makefile_is_custom="1"/>
<Option compiler="gcc"/>
<Option virtualFolders="CMake Files\;CMake Files\..\;CMake Files\..\..\;CMake Files\..\..\cmake\;CMake Files\..\..\cmake\external_libs\;CMake Files\..\..\..\;CMake Files\..\..\..\..\;CMake Files\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\home\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\;CMake Files\..\..\third_party\;CMake Files\..\..\third_party\securec\;CMake Files\..\..\third_party\securec\src\;CMake Files\tools\;CMake Files\tools\converter\;CMake Files\tools\converter\parser\;CMake Files\tools\converter\parser\caffe\;CMake Files\tools\converter\parser\tflite\;CMake Files\tools\converter\optimizer\;CMake Files\tools\converter\optimizer\fusion\;CMake Files\tools\converter\optimizer\node\;CMake Files\tools\converter\optimizer\graph\;CMake Files\tools\converter\quantizer\;CMake Files\tools\benchmark\;CMake Files\src\;CMake Files\src\common\;CMake Files\src\common\anf_exporter\;CMake Files\src\runtime\;CMake Files\src\runtime\kernel\;CMake Files\src\runtime\kernel\arm\;CMake Files\src\runtime\kernel\arm\opclib\;CMake Files\src\ops\;"/>
<Build>
<Target title="all">
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/src/runtime/kernel/arm/opclib"/>
<Option type="4"/>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/src/runtime/kernel/arm/opclib/Makefile&quot; VERBOSE=1 all"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/src/runtime/kernel/arm/opclib/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/src/runtime/kernel/arm/opclib/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-cloud/src/runtime/kernel/arm/opclib/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
</Build>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/CMakeLists.txt">
<Option virtualFolder="CMake Files\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_securec.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/utils.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/eigen.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/json.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/pybind11.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/FindPythonLibsNew.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Tools.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/caffe/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\caffe\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/tflite/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\tflite\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/fusion/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\fusion\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/node/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\node\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/graph/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\graph\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/quantizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\quantizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/benchmark/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\benchmark\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/common/anf_exporter/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\common\anf_exporter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/opclib/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\opclib\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/ops/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\ops\"/>
</Unit>
</Project>
</CodeBlocks_project_file>

File diff suppressed because it is too large

View File

@ -0,0 +1,7 @@
# CMake generated Testfile for
# Source directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest
# Build directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest
#
# This file includes the relevant testing commands required for
# testing this directory and lists subdirectories to be tested as well.
subdirs("googlemock")

View File

@ -0,0 +1,7 @@
# CMake generated Testfile for
# Source directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock
# Build directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock
#
# This file includes the relevant testing commands required for
# testing this directory and lists subdirectories to be tested as well.
subdirs("gtest")

View File

@ -0,0 +1,517 @@
<?xml version="1.0" encoding="UTF-8"?>
<CodeBlocks_project_file>
<FileVersion major="1" minor="6"/>
<Project>
<Option title="gmock"/>
<Option makefile_is_custom="1"/>
<Option compiler="gcc"/>
<Option virtualFolders="CMake Files\;CMake Files\..\;CMake Files\..\..\;CMake Files\..\..\cmake\;CMake Files\..\..\cmake\external_libs\;CMake Files\..\..\..\;CMake Files\..\..\..\..\;CMake Files\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\home\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\;CMake Files\..\..\third_party\;CMake Files\..\..\third_party\securec\;CMake Files\..\..\third_party\securec\src\;CMake Files\..\..\third_party\googletest\;CMake Files\..\..\third_party\googletest\googlemock\;CMake Files\..\..\third_party\googletest\googlemock\cmake\;CMake Files\..\..\third_party\googletest\googletest\;CMake Files\..\..\third_party\googletest\googletest\cmake\;CMake Files\tools\;CMake Files\tools\converter\;CMake Files\tools\converter\parser\;CMake Files\tools\converter\parser\caffe\;CMake Files\tools\converter\parser\tflite\;CMake Files\tools\converter\optimizer\;CMake Files\tools\converter\optimizer\fusion\;CMake Files\tools\converter\optimizer\node\;CMake Files\tools\converter\optimizer\graph\;CMake Files\tools\converter\quantizer\;CMake Files\tools\benchmark\;CMake Files\src\;CMake Files\src\common\;CMake Files\src\common\anf_exporter\;CMake Files\src\runtime\;CMake Files\src\runtime\kernel\;CMake Files\src\runtime\kernel\arm\;CMake Files\src\runtime\kernel\arm\opclib\;CMake Files\src\ops\;CMake Files\test\;"/>
<Build>
<Target title="all">
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock"/>
<Option type="4"/>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 all"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock_main">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/libgmock_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock_main"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock_main/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/libgmock_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock_main/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/libgmockd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/libgmockd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
</Build>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/src/gmock-all.cc">
<Option target="gmock"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/src/gmock_main.cc">
<Option target="gmock_main"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest-all.cc">
<Option target="gtest"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest_main.cc">
<Option target="gtest_main"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/CMakeLists.txt">
<Option virtualFolder="CMake Files\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_gtest.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_securec.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/utils.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/eigen.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/json.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/pybind11.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/FindPythonLibsNew.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Tools.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/Config.cmake.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/internal_utils.cmake">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/caffe/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\caffe\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/tflite/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\tflite\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/fusion/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\fusion\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/node/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\node\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/graph/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\graph\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/quantizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\quantizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/benchmark/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\benchmark\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/common/anf_exporter/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\common\anf_exporter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/opclib/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\opclib\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/ops/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\ops\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/test/CMakeLists.txt">
<Option virtualFolder="CMake Files\test\"/>
</Unit>
</Project>
</CodeBlocks_project_file>
View File
@ -0,0 +1,6 @@
# CMake generated Testfile for
# Source directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest
# Build directory: /mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest
#
# This file includes the relevant testing commands required for
# testing this directory and lists subdirectories to be tested as well.
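The comment-only CTestTestfile.cmake above is what CMake emits for a directory whose CMakeLists.txt declares no tests of its own; add_test() calls and subdirectories are what fill it in. A minimal sketch of the kind of input that produces such entries (illustrative only; the target and test names are hypothetical, not taken from this commit):

    enable_testing()
    add_executable(lite_smoke_test smoke_test.cc)      # hypothetical test binary
    add_test(NAME lite_smoke COMMAND lite_smoke_test)  # written into CTestTestfile.cmake as an add_test() entry
    add_subdirectory(gtest)                            # recorded as a subdirs() entry, i.e. a subdirectory to be tested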
View File
@ -0,0 +1,33 @@
####### Expanded from @PACKAGE_INIT@ by configure_package_config_file() #######
####### Any changes to this file will be overwritten by the next CMake run ####
####### The input file was Config.cmake.in ########
get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/../../../" ABSOLUTE)
macro(set_and_check _var _file)
set(${_var} "${_file}")
if(NOT EXISTS "${_file}")
message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !")
endif()
endmacro()
macro(check_required_components _NAME)
foreach(comp ${${_NAME}_FIND_COMPONENTS})
if(NOT ${_NAME}_${comp}_FOUND)
if(${_NAME}_FIND_REQUIRED_${comp})
set(${_NAME}_FOUND FALSE)
endif()
endif()
endforeach()
endmacro()
####################################################################################
include(CMakeFindDependencyMacro)
if (ON)
set(THREADS_PREFER_PTHREAD_FLAG ON)
find_dependency(Threads)
endif()
include("${CMAKE_CURRENT_LIST_DIR}/GTestTargets.cmake")
check_required_components("")
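With this config file installed, a consuming project can locate GoogleTest in Config mode instead of through the classic FindGTest module. A minimal sketch (the GTest::gtest_main target name is assumed to come from the GTestTargets.cmake include above; the executable name is illustrative):

    find_package(GTest 1.9.0 CONFIG REQUIRED)   # loads GTestConfig.cmake and its version file
    add_executable(unit_tests unit_tests.cc)
    target_link_libraries(unit_tests PRIVATE GTest::gtest_main)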
View File
@ -0,0 +1,37 @@
# This is a basic version file for the Config-mode of find_package().
# It is used by write_basic_package_version_file() as input file for configure_file()
# to create a version-file which can be installed along a config.cmake file.
#
# The created file sets PACKAGE_VERSION_EXACT if the current version string and
# the requested version string are exactly the same and it sets
# PACKAGE_VERSION_COMPATIBLE if the current version is >= requested version.
# The variable CVF_VERSION must be set before calling configure_file().
set(PACKAGE_VERSION "1.9.0")
if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
else()
set(PACKAGE_VERSION_COMPATIBLE TRUE)
if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION)
set(PACKAGE_VERSION_EXACT TRUE)
endif()
endif()
# if the installed project requested no architecture check, don't perform the check
if("FALSE")
return()
endif()
# if the installed or the using project don't have CMAKE_SIZEOF_VOID_P set, ignore it:
if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "" OR "8" STREQUAL "")
return()
endif()
# check that the installed version has the same 32/64bit-ness as the one which is currently searching:
if(NOT CMAKE_SIZEOF_VOID_P STREQUAL "8")
math(EXPR installedBits "8 * 8")
set(PACKAGE_VERSION "${PACKAGE_VERSION} (${installedBits}bit)")
set(PACKAGE_VERSION_UNSUITABLE TRUE)
endif()
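A version file with this structure is what write_basic_package_version_file() from the CMakePackageConfigHelpers module generates; a call along these lines would produce an equivalent file (a sketch, the arguments the googletest build actually uses may differ):

    include(CMakePackageConfigHelpers)
    write_basic_package_version_file(
      "${CMAKE_CURRENT_BINARY_DIR}/GTestConfigVersion.cmake"
      VERSION 1.9.0
      COMPATIBILITY AnyNewerVersion)  # PACKAGE_VERSION_COMPATIBLE whenever installed >= requested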
View File
@ -0,0 +1,9 @@
libdir=/usr/local/lib
includedir=/usr/local/include
Name: gmock
Description: GoogleMock (without main() function)
Version: 1.9.0
URL: https://github.com/google/googletest
Libs: -L${libdir} -lgmock -pthread
Cflags: -I${includedir} -DGTEST_HAS_PTHREAD=1 -pthread
View File
@ -0,0 +1,9 @@
libdir=/usr/local/lib
includedir=/usr/local/include
Name: gmock_main
Description: GoogleMock (with main() function)
Version: 1.9.0
URL: https://github.com/google/googletest
Libs: -L${libdir} -lgmock_main -pthread
Cflags: -I${includedir} -DGTEST_HAS_PTHREAD=1 -pthread
View File
@ -0,0 +1,9 @@
libdir=/usr/local/lib
includedir=/usr/local/include
Name: gtest
Description: GoogleTest (without main() function)
Version: 1.9.0
URL: https://github.com/google/googletest
Libs: -L${libdir} -lgtest -pthread
Cflags: -I${includedir} -DGTEST_HAS_PTHREAD=1 -pthread
View File
@ -0,0 +1,10 @@
libdir=/usr/local/lib
includedir=/usr/local/include
Name: gtest_main
Description: GoogleTest (with main() function)
Version: 1.9.0
URL: https://github.com/google/googletest
Requires: gtest
Libs: -L${libdir} -lgtest_main -pthread
Cflags: -I${includedir} -DGTEST_HAS_PTHREAD=1 -pthread
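The .pc files above expose the same libraries to non-CMake builds through pkg-config; from CMake they can also be consumed via the PkgConfig module (a sketch; the GTEST_MAIN prefix and the test target are illustrative):

    find_package(PkgConfig REQUIRED)
    pkg_check_modules(GTEST_MAIN REQUIRED IMPORTED_TARGET gtest_main)  # reads gtest_main.pc
    add_executable(pc_tests pc_tests.cc)
    target_link_libraries(pc_tests PRIVATE PkgConfig::GTEST_MAIN)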
View File
@ -0,0 +1,327 @@
<?xml version="1.0" encoding="UTF-8"?>
<CodeBlocks_project_file>
<FileVersion major="1" minor="6"/>
<Project>
<Option title="gtest"/>
<Option makefile_is_custom="1"/>
<Option compiler="gcc"/>
<Option virtualFolders="CMake Files\;CMake Files\..\;CMake Files\..\..\;CMake Files\..\..\cmake\;CMake Files\..\..\cmake\external_libs\;CMake Files\..\..\..\;CMake Files\..\..\..\..\;CMake Files\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\home\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\;CMake Files\..\..\third_party\;CMake Files\..\..\third_party\securec\;CMake Files\..\..\third_party\securec\src\;CMake Files\..\..\third_party\googletest\;CMake Files\..\..\third_party\googletest\googlemock\;CMake Files\..\..\third_party\googletest\googlemock\cmake\;CMake Files\..\..\third_party\googletest\googletest\;CMake Files\..\..\third_party\googletest\googletest\cmake\;CMake Files\tools\;CMake Files\tools\converter\;CMake Files\tools\converter\parser\;CMake Files\tools\converter\parser\caffe\;CMake Files\tools\converter\parser\tflite\;CMake Files\tools\converter\optimizer\;CMake Files\tools\converter\optimizer\fusion\;CMake Files\tools\converter\optimizer\node\;CMake Files\tools\converter\optimizer\graph\;CMake Files\tools\converter\quantizer\;CMake Files\tools\benchmark\;CMake Files\src\;CMake Files\src\common\;CMake Files\src\common\anf_exporter\;CMake Files\src\runtime\;CMake Files\src\runtime\kernel\;CMake Files\src\runtime\kernel\arm\;CMake Files\src\runtime\kernel\arm\opclib\;CMake Files\src\ops\;CMake Files\test\;"/>
<Build>
<Target title="all">
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option type="4"/>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 all"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
</Build>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest-all.cc">
<Option target="gtest"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest_main.cc">
<Option target="gtest_main"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/CMakeLists.txt">
<Option virtualFolder="CMake Files\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_gtest.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_securec.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/utils.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/eigen.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/json.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/pybind11.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/FindPythonLibsNew.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Tools.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/Config.cmake.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/internal_utils.cmake">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/caffe/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\caffe\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/tflite/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\tflite\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/fusion/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\fusion\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/node/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\node\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/graph/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\graph\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/quantizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\quantizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/benchmark/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\benchmark\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/common/anf_exporter/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\common\anf_exporter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/opclib/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\opclib\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/ops/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\ops\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/test/CMakeLists.txt">
<Option virtualFolder="CMake Files\test\"/>
</Unit>
</Project>
</CodeBlocks_project_file>

View File

@@ -0,0 +1,517 @@
<?xml version="1.0" encoding="UTF-8"?>
<CodeBlocks_project_file>
<FileVersion major="1" minor="6"/>
<Project>
<Option title="googletest-distribution"/>
<Option makefile_is_custom="1"/>
<Option compiler="gcc"/>
<Option virtualFolders="CMake Files\;CMake Files\..\;CMake Files\..\..\;CMake Files\..\..\cmake\;CMake Files\..\..\cmake\external_libs\;CMake Files\..\..\..\;CMake Files\..\..\..\..\;CMake Files\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\home\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\;CMake Files\..\..\third_party\;CMake Files\..\..\third_party\securec\;CMake Files\..\..\third_party\securec\src\;CMake Files\..\..\third_party\googletest\;CMake Files\..\..\third_party\googletest\googlemock\;CMake Files\..\..\third_party\googletest\googlemock\cmake\;CMake Files\..\..\third_party\googletest\googletest\;CMake Files\..\..\third_party\googletest\googletest\cmake\;CMake Files\tools\;CMake Files\tools\converter\;CMake Files\tools\converter\parser\;CMake Files\tools\converter\parser\caffe\;CMake Files\tools\converter\parser\tflite\;CMake Files\tools\converter\optimizer\;CMake Files\tools\converter\optimizer\fusion\;CMake Files\tools\converter\optimizer\node\;CMake Files\tools\converter\optimizer\graph\;CMake Files\tools\converter\quantizer\;CMake Files\tools\benchmark\;CMake Files\src\;CMake Files\src\common\;CMake Files\src\common\anf_exporter\;CMake Files\src\runtime\;CMake Files\src\runtime\kernel\;CMake Files\src\runtime\kernel\arm\;CMake Files\src\runtime\kernel\arm\opclib\;CMake Files\src\ops\;CMake Files\test\;"/>
<Build>
<Target title="all">
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest"/>
<Option type="4"/>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/Makefile&quot; VERBOSE=1 all"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock_main">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/libgmock_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock_main"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock_main/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/libgmock_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock_main/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/libgmockd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gmock/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/libgmockd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 gmock/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest_main/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtest_maind.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest_main/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
<Target title="gtest/fast">
<Option output="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/libgtestd.so" prefix_auto="0" extension_auto="0"/>
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest"/>
<Option object_output="./"/>
<Option type="3"/>
<Option compiler="gcc"/>
<Compiler>
<Add option="-DGTEST_CREATE_SHARED_LIBRARY=1"/>
<Add option="-DUSE_ANDROID_LOG"/>
<Add option="-DNO_DLIB"/>
<Add option="-DBUILD_DEVICE"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../.."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/core"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/ccsrc"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/flatbuffers/include"/>
<Add directory="/home/hgq/.mslib/nlohmann_json_b73a91c3af99db76a4468ecba3b99509/."/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/cmake/../third_party/securec/include"/>
<Add directory="/mnt/data/software/anaconda/anaconda/include/python3.7m"/>
<Add directory="/home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/include"/>
<Add directory="/home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/include/eigen3"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/protobuf/build/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../tests/ut/cpp"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/tools"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../mindspore/lite/lite"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/include"/>
<Add directory="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest"/>
<Add directory="/usr/include/c++/7"/>
<Add directory="/usr/include/x86_64-linux-gnu/c++/7"/>
<Add directory="/usr/include/c++/7/backward"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include"/>
<Add directory="/usr/local/include"/>
<Add directory="/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed"/>
<Add directory="/usr/include/x86_64-linux-gnu"/>
<Add directory="/usr/include"/>
</Compiler>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 gtest/fast"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/googletest/googlemock/gtest/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
</Build>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/src/gmock-all.cc">
<Option target="gmock"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googlemock/src/gmock_main.cc">
<Option target="gmock_main"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest-all.cc">
<Option target="gtest"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/third_party/googletest/googletest/src/gtest_main.cc">
<Option target="gtest_main"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/CMakeLists.txt">
<Option virtualFolder="CMake Files\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_gtest.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_securec.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/utils.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/eigen.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/json.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/pybind11.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/FindPythonLibsNew.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Tools.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googlemock/cmake/gmock_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googlemock\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/Config.cmake.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/gtest_main.pc.in">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/googletest/googletest/cmake/internal_utils.cmake">
<Option virtualFolder="CMake Files\..\..\third_party\googletest\googletest\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/caffe/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\caffe\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/tflite/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\tflite\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/fusion/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\fusion\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/node/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\node\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/graph/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\graph\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/quantizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\quantizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/benchmark/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\benchmark\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/common/anf_exporter/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\common\anf_exporter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/opclib/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\opclib\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/ops/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\ops\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/test/CMakeLists.txt">
<Option virtualFolder="CMake Files\test\"/>
</Unit>
</Project>
</CodeBlocks_project_file>

View File

@@ -0,0 +1,112 @@
<?xml version="1.0" encoding="UTF-8"?>
<CodeBlocks_project_file>
<FileVersion major="1" minor="6"/>
<Project>
<Option title="optimize"/>
<Option makefile_is_custom="1"/>
<Option compiler="gcc"/>
<Option virtualFolders="CMake Files\;CMake Files\..\;CMake Files\..\..\;CMake Files\..\..\cmake\;CMake Files\..\..\cmake\external_libs\;CMake Files\..\..\..\;CMake Files\..\..\..\..\;CMake Files\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\;CMake Files\..\..\..\..\..\..\..\..\home\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\;CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\;CMake Files\..\..\third_party\;CMake Files\..\..\third_party\securec\;CMake Files\..\..\third_party\securec\src\;CMake Files\tools\;CMake Files\tools\converter\;CMake Files\tools\converter\parser\;CMake Files\tools\converter\parser\caffe\;CMake Files\tools\converter\parser\tflite\;CMake Files\tools\converter\optimizer\;CMake Files\tools\converter\optimizer\fusion\;CMake Files\tools\converter\optimizer\node\;CMake Files\tools\converter\optimizer\graph\;CMake Files\tools\converter\quantizer\;CMake Files\tools\benchmark\;CMake Files\src\;CMake Files\src\common\;CMake Files\src\common\anf_exporter\;CMake Files\src\runtime\;CMake Files\src\runtime\kernel\;CMake Files\src\runtime\kernel\arm\;CMake Files\src\runtime\kernel\arm\opclib\;CMake Files\src\ops\;"/>
<Build>
<Target title="all">
<Option working_dir="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/src/runtime/kernel/arm/opclib"/>
<Option type="4"/>
<MakeCommands>
<Build command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/src/runtime/kernel/arm/opclib/Makefile&quot; VERBOSE=1 all"/>
<CompileFile command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/src/runtime/kernel/arm/opclib/Makefile&quot; VERBOSE=1 &quot;$file&quot;"/>
<Clean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/src/runtime/kernel/arm/opclib/Makefile&quot; VERBOSE=1 clean"/>
<DistClean command="/usr/bin/make -j8 -f &quot;/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/cmake-build-minnie/src/runtime/kernel/arm/opclib/Makefile&quot; VERBOSE=1 clean"/>
</MakeCommands>
</Target>
</Build>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/CMakeLists.txt">
<Option virtualFolder="CMake Files\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/dependency_securec.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/utils.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/eigen.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/json.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../cmake/external_libs/pybind11.cmake">
<Option virtualFolder="CMake Files\..\..\cmake\external_libs\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/FindPythonLibsNew.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/pybind11_4ff815e53c2c9c54255c528ad480c451/share/cmake/pybind11/pybind11Tools.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\pybind11_4ff815e53c2c9c54255c528ad480c451\share\cmake\pybind11\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Config.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3ConfigVersion.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../../../../../../../home/hgq/.mslib/eigen3_951666374a92d62fcd880d2baa7db402/share/eigen3/cmake/Eigen3Targets.cmake">
<Option virtualFolder="CMake Files\..\..\..\..\..\..\..\..\home\hgq\.mslib\eigen3_951666374a92d62fcd880d2baa7db402\share\eigen3\cmake\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/../../third_party/securec/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\..\..\third_party\securec\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/caffe/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\caffe\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/parser/tflite/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\parser\tflite\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/fusion/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\fusion\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/node/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\node\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/optimizer/graph/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\optimizer\graph\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/converter/quantizer/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\converter\quantizer\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/tools/benchmark/CMakeLists.txt">
<Option virtualFolder="CMake Files\tools\benchmark\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/common/anf_exporter/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\common\anf_exporter\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/runtime/kernel/arm/opclib/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\runtime\kernel\arm\opclib\"/>
</Unit>
<Unit filename="/mnt/data/workspace/OpenAI/Huawei/mindspore/mindspore/lite/src/ops/CMakeLists.txt">
<Option virtualFolder="CMake Files\src\ops\"/>
</Unit>
</Project>
</CodeBlocks_project_file>


@ -0,0 +1,76 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_CONTEXT_H_
#define MINDSPORE_LITE_INCLUDE_CONTEXT_H_
#include <string>
#include <memory>
#include "include/ms_tensor.h"
namespace mindspore::lite {
class Allocator;
enum CpuBindMode {
MID_CPU = -1, /**< bind mid cpu first */
HIGHER_CPU = 1, /**< bind higher cpu first */
NO_BIND = 0 /**< no bind */
};
typedef enum { DT_CPU, DT_GPU, DT_NPU } DeviceType;
// brief NPUContext defined by MindSpore predict
typedef struct {
int freq{3};
int fmkType{0};
int modelType{0};
int deviceType{0};
std::string modelName = "default";
} NPUContext;
// brief DeviceContext defined by MindSpore predict
typedef struct {
DeviceType type;
// DLContext primary;
NPUContext npuCtx;
} DeviceContext;
// brief Context defined by MindSpore predict
class MS_API Context {
public:
// brief Constructor of MindSpore predict context using default values for parameters
//
// return Instance of MindSpore predict context.
Context();
// brief Constructor of MindSpore predict context using input values for parameters
//
// param[in] threadNum Define the threadNum during the runtime.
// param[in] allocator Define the allocator for malloc.
// param[in] deviceCtx Define device information during the runtime.
Context(int threadNum, std::shared_ptr<Allocator> allocator, DeviceContext deviceCtx);
// brief Destructor of MindSpore predict context
virtual ~Context();
public:
DeviceContext deviceCtx;
int threadNum = 2;
std::shared_ptr<Allocator> allocator;
CpuBindMode cpuBindMode = MID_CPU;
};
} // namespace mindspore::lite
#endif // MINDSPORE_LITE_INCLUDE_CONTEXT_H_
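A minimal configuration sketch for the Context declared above (illustrative only, not part of this commit; the thread count and bind mode are assumed values):

#include "include/context.h"

// Hypothetical helper: build a CPU context with four threads and big-core binding.
// Field and enum names follow the header above.
mindspore::lite::Context MakeCpuContext() {
  mindspore::lite::Context ctx;                   // defaults: threadNum = 2, MID_CPU bind mode
  ctx.deviceCtx.type = mindspore::lite::DT_CPU;   // run on CPU rather than GPU/NPU
  ctx.threadNum = 4;                              // assumption: four worker threads
  ctx.cpuBindMode = mindspore::lite::HIGHER_CPU;  // prefer higher-frequency cores
  return ctx;
}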


@ -0,0 +1,55 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_ERRORCODE_H_
#define MINDSPORE_LITE_INCLUDE_ERRORCODE_H_
namespace mindspore {
namespace lite {
using STATUS = int;
/* Success */
constexpr int RET_OK = 0; /**< No error occurs. */
/* Common error code, range: [-1, -100]*/
constexpr int RET_ERROR = -1; /**< Common error code. */
constexpr int RET_NULL_PTR = -2; /**< NULL pointer returned.*/
constexpr int RET_PARAM_INVALID = -3; /**< Invalid parameter.*/
constexpr int RET_NO_CHANGE = -4; /**< No change. */
constexpr int RET_SUCCESS_EXIT = -5; /**< No error but exit. */
constexpr int RET_MEMORY_FAILED = -6; /**< Create memory failed. */
/* Executor error code, range: [-101,-200] */
constexpr int RET_OUT_OF_TENSOR_RANGE = -101; /**< Failed to check range. */
constexpr int RET_INPUT_TENSOR_ERROR = -102; /**< Failed to check input tensor. */
constexpr int RET_REENTRANT_ERROR = -103; /**< An executor is already running. */
/* Graph error code, range: [-201,-300] */
constexpr int RET_GRAPH_FILE_ERR = -201; /**< Failed to verify graph file. */
/* Node error code, range: [-301,-400] */
constexpr int RET_NOT_FIND_OP = -301; /**< Failed to find operator. */
constexpr int RET_INVALID_OP_NAME = -302; /**< Invalid operator name. */
constexpr int RET_INVALID_OP_ATTR = -303; /**< Invalid operator attr. */
constexpr int RET_OP_EXECUTE_FAILURE = -304; /**< Failed to execute operator. */
/* Tensor error code, range: [-401,-500] */
constexpr int RET_FORMAT_ERR = -401; /**< Failed to check tensor format. */
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_INCLUDE_ERRORCODE_H_
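A hedged sketch of how these status codes are typically propagated (CheckInputCount is a made-up helper used only to illustrate the return-code style, not an API from this commit):

#include "include/errorcode.h"

namespace {
// Hypothetical validation helper, for illustration only.
int CheckInputCount(int count) {
  return count > 0 ? mindspore::lite::RET_OK : mindspore::lite::RET_PARAM_INVALID;
}
}  // namespace

int Prepare(int input_count) {
  int ret = CheckInputCount(input_count);
  if (ret != mindspore::lite::RET_OK) {
    return ret;  // propagate the negative code unchanged, following the ranges above
  }
  return mindspore::lite::RET_OK;
}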


@ -0,0 +1,52 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_LITE_SESSION_H
#define MINDSPORE_LITE_INCLUDE_LITE_SESSION_H
#include <memory>
#include <vector>
#include <string>
#include "include/ms_tensor.h"
#include "include/model.h"
#include "include/context.h"
namespace mindspore {
namespace session {
class MS_API LiteSession {
public:
virtual ~LiteSession() = default;
virtual void BindThread(bool ifBind) = 0;
static LiteSession *CreateSession(lite::Context *context);
virtual int CompileGraph(lite::Model *model) = 0;
virtual std::vector<tensor::MSTensor *> GetInputs() = 0;
virtual std::vector<tensor::MSTensor *> GetInputsByName(std::string name) = 0;
virtual int RunGraph() = 0;
virtual std::vector<tensor::MSTensor *> GetOutputs() = 0;
virtual std::vector<tensor::MSTensor *> GetOutputsByName(std::string name) = 0;
};
} // namespace session
} // namespace mindspore
#endif // MINDSPORE_LITE_INCLUDE_LITE_SESSION_H
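An end-to-end usage sketch combining Context, Model and the LiteSession interface above (Model and MSTensor are declared in the headers that follow). This is illustrative only, not code from this commit; the buffer is assumed to hold a serialized MetaGraph and error handling is reduced to early exits.

#include <cstddef>
#include <cstring>
#include "include/context.h"
#include "include/errorcode.h"
#include "include/lite_session.h"
#include "include/model.h"

// Hypothetical driver, for illustration only.
int RunInference(const char *model_buf, size_t model_size, const float *input, size_t input_bytes) {
  auto model = mindspore::lite::Model::Import(model_buf, model_size);
  if (model == nullptr) {
    return mindspore::lite::RET_NULL_PTR;
  }
  mindspore::lite::Context context;  // default CPU context, two threads
  auto *session = mindspore::session::LiteSession::CreateSession(&context);
  if (session == nullptr) {
    return mindspore::lite::RET_NULL_PTR;
  }
  int ret = session->CompileGraph(model.get());
  if (ret == mindspore::lite::RET_OK) {
    auto inputs = session->GetInputs();
    if (!inputs.empty() && inputs[0]->Size() == input_bytes) {
      std::memcpy(inputs[0]->MutableData(), input, input_bytes);  // fill the first graph input
      ret = session->RunGraph();  // outputs are then available via session->GetOutputs()
    } else {
      ret = mindspore::lite::RET_INPUT_TENSOR_ERROR;
    }
  }
  delete session;  // LiteSession has a virtual destructor
  return ret;
}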


@ -0,0 +1,57 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_MODEL_H
#define MINDSPORE_LITE_INCLUDE_MODEL_H
#include <string>
#include <vector>
#include <memory>
#include "schema/model_generated.h"
namespace mindspore {
class ModelImpl;
namespace lite {
class Primitive;
class Model {
public:
static std::shared_ptr<Model> Import(const char *model_buf, size_t size);
virtual ~Model() = default;
Model() = default;
lite::Primitive *GetOp(const std::string &name) const;
const schema::MetaGraph *GetMetaGraph() const;
std::shared_ptr<ModelImpl> GetModelImpl();
void FreeMetaGraph();
protected:
std::shared_ptr<ModelImpl> modelImpl = nullptr;
};
class ModelBuilder {
public:
struct OutEdge {
std::string nodeId;
size_t outEdgeIndex;
};
ModelBuilder() = default;
virtual ~ModelBuilder() = default;
virtual std::string AddOp(const lite::Primitive &op, const std::vector<OutEdge> &inputs) = 0;
virtual Model *Construct();
};
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_INCLUDE_MODEL_H


@ -0,0 +1,70 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_MS_TENSOR_H_
#define MINDSPORE_INCLUDE_MS_TENSOR_H_
#include <utility>
#include <vector>
#include <memory>
#include "ir/dtype/type_id.h"
namespace mindspore {
#define MS_API __attribute__((visibility("default")))
namespace tensor {
class MS_API MSTensor {
public:
MSTensor() = default;
// brief Create a MSTensor pointer.
//
// param data_type DataTypeId of tensor to be created.
// param shape Shape of tensor to be created.
// return MSTensor pointer.
static MSTensor *CreateTensor(TypeId data_type, const std::vector<int> &shape);
virtual ~MSTensor() = default;
virtual TypeId data_type() const = 0;
virtual TypeId set_data_type(const TypeId data_type) = 0;
virtual std::vector<int> shape() const = 0;
virtual size_t set_shape(const std::vector<int> &shape) = 0;
virtual int DimensionSize(size_t index) const = 0;
// brief Get number of elements in MSTensor.
//
// return Number of elements in MSTensor.
virtual int ElementsNum() const = 0;
virtual std::size_t hash() const = 0;
// brief Get byte size of data in MSTensor.
//
// return Byte size of data in MSTensor.
virtual size_t Size() const = 0;
// brief Get pointer of data in MSTensor.
//
// The data pointer can be used to both write and read data in MSTensor.
//
// return A pointer to the data in MSTensor.
virtual void *MutableData() const = 0;
};
using MultiTensor = std::vector<std::vector<std::shared_ptr<tensor::MSTensor>>>;
} // namespace tensor
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_MS_TENSOR_H_
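A small sketch of the MSTensor interface above: create a float32 tensor and write through MutableData(). Illustrative only; the shape is an arbitrary example and kNumberTypeFloat32 comes from "ir/dtype/type_id.h" included above.

#include <vector>
#include "include/ms_tensor.h"

// Hypothetical helper, for illustration only; the caller owns the returned tensor.
mindspore::tensor::MSTensor *MakeZeroFilledInput() {
  std::vector<int> shape = {1, 224, 224, 3};  // example NHWC shape
  auto *tensor = mindspore::tensor::MSTensor::CreateTensor(mindspore::kNumberTypeFloat32, shape);
  if (tensor == nullptr) {
    return nullptr;
  }
  auto *data = static_cast<float *>(tensor->MutableData());
  for (int i = 0; i < tensor->ElementsNum(); ++i) {
    data[i] = 0.0f;  // placeholder values
  }
  return tensor;
}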


@ -0,0 +1,208 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
include "ops.fbs";
namespace mindspore.schema;
enum NodeType: int {
ValueNode, // const
Parameter, // var
CNode // op
}
table QuantParam {
scale: double;
zeroPoint: int;
min: double = 0;
max: double = 0;
narrowRange: bool = true;
numBits: int = 8;
inited: bool = false;
}
table Tensor {
nodeType: NodeType;
// data type
dataType: int;
// shape
dims: [int];
format: Format;
refCount: int;
offset: int;
data: [ubyte];
quantParams: [QuantParam];
}
union PrimitiveType {
Concat,
SoftMax,
Activation,
Conv2D,
FusedBatchNorm,
CaffeBatchNorm,
BiasAdd,
Pooling,
DepthwiseConv2D,
DeDepthwiseConv2D,
Resize,
DetectionPostProcess,
FullConnection,
Mean,
DeConv2D,
Scale,
Reshape,
Eltwise,
NetOutput,
Add,
Sub,
MatMul,
StridedSlice,
Power,
Slice,
Stack,
Mul,
RealDiv,
Pad,
Maximum,
Minimum,
CaffePReLU,
LeakyReLU,
ArgMax,
ArgMin,
Exp,
Crop,
Range,
Rsqrt,
ExpandDims,
Tile,
Cast,
Shape,
Nchw2Nhwc,
Nhwc2Nchw,
QuantDTypeCast,
Split,
Permute,
FakeQuantWithMinMaxVars,
Equal,
Less,
Greater,
NotEqual,
LessEqual,
GreaterEqual,
Min,
Floor,
Abs,
Neg,
Cos,
Sin,
Sqrt,
Square,
Constant,
Log,
Tan,
Atan,
Asin,
Clip,
Transpose,
Squeeze,
Unsqueeze,
Upsample,
Dropout,
Broadcast,
BroadcastTo,
Lrn,
Prelu,
ZerosLike,
TopK,
SpaceToDepth,
SpaceToBatch,
SparseToDense,
ReverseSequence,
Rank,
Gather,
GatherNd,
Fill,
Elu,
DepthToSpace,
BatchToSpace,
AddN,
Ceil,
EmbeddingLookup,
EmbeddingLookupSparse,
FloorDiv,
FloorMod,
L2Norm,
LocalResponseNormalization,
MatrixDiag,
Reduce,
Reverse,
Round,
Select,
Scatter,
ScatterND,
Unique,
Unstack,
LogicalAnd,
LogicalOr,
LogicalXor,
LogicalNot,
OnnxInt8Quantize,
OnnxInt8Dequantize,
FakeQuantWithMinMax,
FakeQuantWithMinMaxPerChannel,
BatchNormFold,
MulFold,
AddFold,
SquaredDifference,
Flatten,
TupleGetItem,
Div,
Where,
OneHot
}
enum QuantType: int {
QUANT_NONE,
AwareTrainning,
WeightQuant,
PostTraining
}
table Primitive {
value: PrimitiveType;
}
table CNode {
name: string;
nodeType: NodeType = CNode;
primitive: Primitive;
inputIndex: [uint];
outputIndex: [uint];
quantType: QuantType = QUANT_NONE;
}
table MetaGraph {
name: string;
fmkType: int; // 0:tf,1:caffe
inputIndex: [uint];
outputIndex: [uint];
mempoolSize: uint;
nodes: [CNode];
allTensors: [Tensor]; // weight + input + output
}
root_type MetaGraph;
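The converter and exporter below work with the object API (MetaGraphT, CNodeT, TensorT, QuantParamT) that flatc generates from this schema. As a hedged illustration of that mapping (not part of this commit), the following builds a trivial graph holding one float32 value tensor and no nodes:

#include <memory>
#include "ir/dtype/type_id.h"
#include "schema/inner/model_generated.h"

// Hypothetical example: one value tensor registered as the graph input.
std::unique_ptr<mindspore::schema::MetaGraphT> MakeToyMetaGraph() {
  auto graph = std::make_unique<mindspore::schema::MetaGraphT>();
  graph->name = "toy_graph";
  auto tensor = std::make_unique<mindspore::schema::TensorT>();
  tensor->nodeType = mindspore::schema::NodeType_ValueNode;
  tensor->dataType = mindspore::kNumberTypeFloat32;  // TypeId value stored in the int field
  tensor->dims = {1, 3};
  graph->allTensors.emplace_back(std::move(tensor));
  graph->inputIndex.emplace_back(0);  // mark the tensor as a graph input
  return graph;
}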


@ -0,0 +1,719 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace mindspore.schema;
enum ResizeMethod: byte {
UNKNOW = -1,
BILINEAR = 0,
NEAREST_NEIGHBOR = 1
}
enum Format : int {
NCHW = 0,
NHWC,
NHWC4,
HWKC,
HWCK,
KCHW,
CKHW,
KHWC,
CHWK,
NC4HW4 = 100,
NUM_OF_FORMAT
}
enum ActivationType : byte {
NO_ACTIVATION = 0,
RELU = 1,
SIGMOID = 2,
RELU6 = 3,
ELU = 4,
LEAKY_RELU = 5,
ABS = 6,
RELU1 = 7,
SOFTSIGN = 8,
SOFTPLUS = 9,
TANH = 10,
SELU = 11,
HSWISH = 12,
HSIGMOID = 13,
THRESHOLDRELU = 14,
LINEAR = 15,
UNKNOW = 16
}
enum ReduceType : byte {
REDUCE_MAX = 0,
REDUCE_MEAN = 1,
REDUCE_ALL = 2,
REDUCE_ANY = 3,
REDUCE_LOG_SUM_EXP = 4,
REDUCE_PROD = 5,
REDUCE_SUM = 6,
UNKNOW = 7
}
enum PoolMode : byte {
MAX_POOLING = 0,
MEAN_POOLING = 1,
}
enum EltwiseMode : byte {
PROD = 0,
SUM = 1,
MAXIMUM = 2,
UNKNOW = 3
}
enum PadMode : byte {
NOTSET = 0,
SAME = 1,
VALID = 2,
CAFFE = 4
}
enum RoundMode : byte {
FLOOR = 0,
CEIL = 1
}
enum PaddingMode : byte {
CONSTANT = 0,
REFLECT = 1,
SYMMETRIC = 2,
MODE_RESERVED = 3
}
table Pad {
paddingmode: PaddingMode;
paddings: [int];
}
table Maximum {
}
table Minimum {
}
table Flatten {
}
table Concat {
axis: int;
n: int;
}
table SoftMax {
axis: int;
}
table Activation {
type: ActivationType = 0;
}
table Conv2D {
format: Format = 0;
group: int;
channelIn: int;
channelOut: int;
kernelW: int;
kernelH: int;
strideW: int;
strideH: int;
padMode: PadMode;
padUp: int;
padDown: int;
padLeft: int;
padRight: int;
dilateW: int;
dilateH: int;
hasBias: bool = false;
activationType: ActivationType = 0;
}
table FusedBatchNorm {
epsilon: float = 0.00001; // eg. epsilon=0.001
momentum: float = 0.9;
spatial: int = 1;
}
table CaffeBatchNorm {
epsilon: float; // eg. epsilon=0.001
}
table Shape {
}
table Nchw2Nhwc {
}
table Nhwc2Nchw {
}
table FakeQuantWithMinMaxVars {
narrowRange: bool;
numBits: int;
}
table BiasAdd {
axis: [int];
}
table Pooling {
format: Format = 0;
poolingMode: PoolMode;
global: bool = false;
windowW: int;
windowH: int;
strideW: int;
strideH: int;
padMode: PadMode;
padUp: int;
padDown: int;
padLeft: int;
padRight: int;
roundMode: RoundMode;
}
table DepthwiseConv2D {
format: Format = 0;
channelIn: int;
channelMultiplier: int;
kernelW: int;
kernelH: int;
strideW: int;
strideH: int;
padMode: PadMode;
padUp: int;
padDown: int;
padLeft: int;
padRight: int;
dilateW: int;
dilateH: int;
hasBias: bool = false;
activationType: ActivationType = 0;
}
table DeDepthwiseConv2D {
format: Format = 0;
channelIn: int;
channelMultiplier: int;
kernelW: int;
kernelH: int;
strideW: int;
strideH: int;
padMode: PadMode;
padUp: int;
padDown: int;
padLeft: int;
padRight: int;
dilateW: int;
dilateH: int;
hasBias: bool = false;
activationType: ActivationType = 0;
}
table Resize {
format: Format = 0;
method: ResizeMethod;
newHeight: long;
newWidth: long;
alignCorners: bool = false;
preserveAspectRatio: bool = false;
}
table DetectionPostProcess {
format: Format = 0;
inputSize: int;
hScale: float;
wScale: float;
xScale: float;
yScale: float;
NmsIouThreshold: float;
NmsScoreThreshold: float;
MaxDetections: long;
DetectionsPreClass: long;
MaxClassesPreDetection: long;
NumClasses: long;
UseRegularNms: bool;
}
table FullConnection {
hasBias: bool;
axis: int;
}
// Mean(input_tensor, axis, keep_dims)
table Mean {
axis: [int];
keepDims: bool = false;
}
table DeConv2D {
format: Format = 0;
group: int;
channelIn: int;
channelOut: int;
kernelW: int;
kernelH: int;
strideW: int;
strideH: int;
padMode: PadMode;
padUp: int;
padDown: int;
padLeft: int;
padRight: int;
dilateW: int;
dilateH: int;
hasBias: bool = false;
activationType: ActivationType = 0;
}
table Scale {
format: Format = 0;
}
table Eltwise {
mode: EltwiseMode;
}
table Add {
}
table Sub {
}
table Mul {
}
table Div {
}
table RealDiv {
}
table Rsqrt {
}
table Equal {
}
table Less {
}
table Greater {
}
table NotEqual {
}
table LessEqual {
}
table GreaterEqual {
}
table Min {
}
table Slice {
format: Format = 0;
begin: [int];
size: [int];
}
table Floor {
}
table Abs {
}
table Neg {
}
table Exp {
}
table Cos {
}
table Sin {
}
table Sqrt {
}
table Square {
}
table Ceil {
}
table Log {
}
table Tan {
}
table Atan {
}
table Asin {
}
table Reshape {
format: Format = 0;
shape: [long];
}
table Power {
power: float;
scale: float;
shift: float;
}
table ArgMax {
axis: int;
outMaxValue: bool;
topK: int = 1;
keepDims: bool;
axisType: int;
}
table ArgMin {
axis: int;
outMaxValue: bool;
topK: int = 1;
keepDims: bool;
axisType: int;
}
table NetOutput {
}
table MatMul {
transposeA : bool = false;
transposeB : bool = false;
}
table CaffePReLU {
channelShared : bool = false;
}
table LeakyReLU {
negativeSlope: float;
}
table StridedSlice {
beginMask: int;
endMask: int;
ellipsisMask: int;
newAxisMask: int;
shrinkAxisMask: int;
begin: [int];
end: [int];
stride: [int];
isScale: [int];
}
table Stack {
axis: int;
n: int;
isScale: [int];
}
table Range {
dType: int;
start: int;
limit: int;
delta: int;
}
table ExpandDims {
dim: int;
}
table Tile {
multiples: [int];
}
table Cast {
srcT: int;
dstT: int;
}
table QuantDTypeCast {
srcT: int;
dstT: int;
}
table Split {
numberSplit: int;
sizeSplits: [int];
splitDim: int;
}
table Crop {
axis : long;
offsets : [long];
}
table Permute {
order: [long];
}
table Clip {
max: float;
min: float;
}
table Constant {
}
table Elu {
alpha: float = 1.0;
}
table Broadcast {
}
table BroadcastTo {
dst_shape: [int];
}
table Lrn {
alpha: float = 0.0001;
beta: float = 0.75;
bias: float = 1.0;
size: int;
}
enum ReduceMode : byte {
ReduceMean = 0,
ReduceMax = 1,
ReduceMin = 2,
ReduceProd = 3,
ReduceSum = 4,
ReduceSumSquare = 5
}
table Reduce {
axes: [int];
keepDims: int;
mode: ReduceMode;
}
table Prelu {
slope: [float];
}
table Transpose {
perm: [int];
conjugate: bool = false;
}
table Squeeze {
axis: [int];
}
table Unsqueeze {
axis: [int];
}
table Upsample {
mode: string;
scales: [float];
}
table Dropout {
ratio : float = 0.5;
}
table LocalResponseNormalization {
depth_radius: int;
bias: float;
alpha: float;
beta: float;
}
table ZerosLike {
}
table TopK {
k : int;
sorted : bool = true;
}
table SpaceToDepth {
blockSize : int;
format: Format = 0;
}
table SpaceToBatch {
blockShape : [int];
paddings : [int];
}
table SparseToDense {
validateIndices: bool;
}
table ReverseSequence {
seqAxis: int;
batchAxis: int;
}
table Rank {
}
table Gather {
axis: int;
batchDims: int;
}
table GatherNd {
batchDims: int;
}
table Fill {
dims: [int];
}
table DepthToSpace {
blockSize: int;
format: Format = 0;
}
table BatchToSpace {
blockShape: [int];
crops: [int];
}
table AddN {
N: int;
}
table EmbeddingLookup {
ids: [int];
maxNorm: float;
}
table EmbeddingLookupSparse {
spIds: [int];
spWeights: [float];
//combiner: Combiner=0;
maxNortm: float;
}
table FloorDiv {
}
table FloorMod {
}
table L2Norm {
axis: [int];
epsilon: float;
}
table LogicalAnd {
}
table LogicalOr {
}
table LogicalXor {
}
table LogicalNot {
}
table MatrixDiag {
k: int;
numRows: int;
numCols: int;
paddingValue: float;
}
table Select {
}
table TfReduce {
type: ReduceType = 7;
}
table Reverse {
axis: [int];
}
table Round {
}
table Scatter {
}
table ScatterND {
}
table Unique {
}
table Unstack {
num: int;
axis: int;
}
table OnnxInt8Quantize {
}
table OnnxInt8Dequantize {
}
table FakeQuantWithMinMax {
}
table FakeQuantWithMinMaxPerChannel {
}
table BatchNormFold {
}
table MulFold {
}
table AddFold {
}
table SquaredDifference {
}
table TupleGetItem {
}
table Where{
}
table OneHot {
axis: int;
}


@ -0,0 +1,83 @@
set(LITE_SRC
${CMAKE_CURRENT_SOURCE_DIR}/common/graph_util.cc
${CMAKE_CURRENT_SOURCE_DIR}/runtime/allocator.cc
${CMAKE_CURRENT_SOURCE_DIR}/runtime/runtime_api.cc
${CMAKE_CURRENT_SOURCE_DIR}/runtime/thread_pool.cc
${CMAKE_CURRENT_SOURCE_DIR}/runtime/workspace_pool.cc
${CMAKE_CURRENT_SOURCE_DIR}/ir/tensor.cc
${CMAKE_CURRENT_SOURCE_DIR}/context.cc
${CMAKE_CURRENT_SOURCE_DIR}/executor.cc
${CMAKE_CURRENT_SOURCE_DIR}/kernel_factory.cc
${CMAKE_CURRENT_SOURCE_DIR}/kernel_registry.cc
${CMAKE_CURRENT_SOURCE_DIR}/lite_kernel.cc
${CMAKE_CURRENT_SOURCE_DIR}/model.cc
${CMAKE_CURRENT_SOURCE_DIR}/populate_parameter.cc
${CMAKE_CURRENT_SOURCE_DIR}/scheduler.cc
)
if (SUPPORT_GPU)
list(APPEND LITE_SRC ${CMAKE_CURRENT_SOURCE_DIR}/runtime/kernel/opencl/subgraph_opencl_kernel.cc)
list(APPEND LITE_SRC ${CMAKE_CURRENT_SOURCE_DIR}/runtime/kernel/opencl/utils.cc)
endif()
if (SUPPORT_TRAIN)
set(ANF_SRC
# ${CCSRC_DIR}/common/trans.cc
# ${CCSRC_DIR}/utils/lite/base_ref_utils.cc
# ${CCSRC_DIR}/runtime/kernel/kernel_compiler/kernel_build_info.cc
# ${CCSRC_DIR}/session/lite/anf_runtime_algorithm_extends.cc
# ${CCSRC_DIR}/session/lite/session_basic_extends.cc
# ${CCSRC_DIR}/session/anf_runtime_algorithm.cc
# ${CCSRC_DIR}/session/session_basic.cc
# ${CCSRC_DIR}/session/kernel_graph.cc
# ${CCSRC_DIR}/session/session_factory.cc
# ${CCSRC_DIR}/device/kernel_info.cc
# ${CCSRC_DIR}/device/kernel_runtime.cc
# ${CCSRC_DIR}/device/lite/kernel_runtime_extends.cc
)
set(PASS_SRC)
set(LITE_SRC
${LITE_SRC}
${ANF_SRC}
${PASS_SRC}
${CMAKE_CURRENT_SOURCE_DIR}/common/anf_importer/anf_importer.cc
${CMAKE_CURRENT_SOURCE_DIR}/common/anf_importer/import_from_meta_graph.cc
${CMAKE_CURRENT_SOURCE_DIR}/ir/primitive_value.cc
${CMAKE_CURRENT_SOURCE_DIR}/train/lite_kernel_runtime.cc
${CMAKE_CURRENT_SOURCE_DIR}/train/train_session.cc
${CMAKE_CURRENT_SOURCE_DIR}/train/model_impl.cc
)
else ()
set(LITE_SRC
${LITE_SRC}
${CMAKE_CURRENT_SOURCE_DIR}/lite_session.cc
${CMAKE_CURRENT_SOURCE_DIR}/model_impl.cc
)
endif ()
if (SUPPORT_GPU)
set(LITE_SRC
${LITE_SRC}
${CMAKE_CURRENT_SOURCE_DIR}/runtime/opencl/opencl_executor.cc
${CMAKE_CURRENT_SOURCE_DIR}/runtime/opencl/opencl_allocator.cc
${CMAKE_CURRENT_SOURCE_DIR}/runtime/opencl/opencl_runtime.cc
${CMAKE_CURRENT_SOURCE_DIR}/runtime/opencl/opencl_wrapper.cc
)
endif ()
set(ANF_SRC
${ANF_SRC}
${CMAKE_CURRENT_SOURCE_DIR}/ir/meta_tensor_extends.cc
)
add_library(mindspore-lite SHARED ${LITE_SRC} ${ANF_SRC})
target_link_libraries(mindspore-lite
cpu_kernel_mid_
ops_mid_
${SECUREC_LIBRARY}
mindspore::json
)
add_subdirectory(runtime/kernel/arm)
add_subdirectory(ops)


@ -0,0 +1,7 @@
file(GLOB_RECURSE ANF_SRC_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}
*.cc
)
add_library(anf_exporter_mid OBJECT
${ANF_SRC_LIST}
)


@ -0,0 +1,263 @@
/**
* This is the C++ adaptation and derivative work of Myia (https://github.com/mila-iqia/myia/).
*
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_exporter.h"
#include <memory>
#include <utility>
#include <vector>
#include <string>
#include "abstract/abstract_value.h"
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "src/param_value_lite.h"
#include "mindspore/core/ir/primitive.h"
#include "src/ir/primitive_t_value.h"
#include "base/core_ops.h"
namespace mindspore::lite {
schema::MetaGraphT *AnfExporter::Export(const FuncGraphPtr &funcGraph) {
auto cnodes = funcGraph->GetOrderedCnodes();
auto metaGraphT = std::make_unique<schema::MetaGraphT>();
for (const auto &cnode : cnodes) {
auto primitive = GetValueNode<PrimitivePtr>(cnode->input(0));
if (primitive != nullptr && primitive == prim::kPrimReturn) {
// set graph outputs tensors
auto inputNode = cnode->input(1);
if (!inputNode->isa<CNode>()) {
continue;
}
auto inputCNode = utils::cast<CNodePtr>(inputNode);
auto inputPrimitive = GetValueNode<PrimitivePtr>(inputCNode->input(0));
if (inputPrimitive == prim::kPrimMakeTuple) {
continue;
} else {
std::string inputName = inputNode->fullname_with_scope();
auto graphOutput = nodeIdMap[inputName];
metaGraphT->outputIndex.emplace_back(graphOutput);
}
continue;
}
if (primitive != nullptr && primitive == prim::kPrimMakeTuple) {
for (size_t i = 1; i < cnode->inputs().size(); i++) {
auto graphOutNode = cnode->input(i);
if (!graphOutNode->isa<CNode>()) {
MS_LOG(ERROR) << "Inputs of MakeTuple should be cNode";
return nullptr;
}
std::string graphOutNodeName = graphOutNode->fullname_with_scope();
auto graphOutIndex = nodeIdMap[graphOutNodeName];
metaGraphT->outputIndex.emplace_back(graphOutIndex);
}
continue;
}
auto node = std::make_unique<schema::CNodeT>();
node->name = cnode->fullname_with_scope();
node->nodeType = schema::NodeType_CNode;
// populate primitive
if (primitive != nullptr) {
primitive = GetValueNode<PrimitivePtr>(cnode->input(0));
MS_ASSERT(primitive != nullptr);
std::string opType = primitive->name();
auto nodeParser = AnfNodePopulaterRegistry::GetInstance()->GetNodePopulater(opType);
if (nodeParser == nullptr) {
MS_LOG(ERROR) << "Find op parser failed, opType: " << opType;
return nullptr;
}
std::vector<schema::TensorT *> outputs;
nodeParser->Parse(cnode, node.get(), &outputs);
SetOpInputNode(cnode, metaGraphT.get(), node.get());
SetOpOutputNode(outputs, metaGraphT.get(), node.get());
metaGraphT->nodes.emplace_back(std::move(node));
continue;
}
auto primitiveT_value = GetValueNode<std::shared_ptr<PrimitiveTValue>>(cnode->input(0));
if (primitiveT_value == nullptr) {
MS_LOG(ERROR) << "PrimitiveT_value is nullptr";
return nullptr;
}
auto *lite_primitive = primitiveT_value->GetPrimitiveT();
if (lite_primitive == nullptr) {
MS_LOG(ERROR) << "Primitive in primitiveT_value is nullptr";
return nullptr;
}
node->primitive = std::unique_ptr<schema::PrimitiveT>(primitiveT_value->GetPrimitiveT());
primitiveT_value->SetPrimitiveT(nullptr);
std::vector<schema::TensorT *> outputs;
SetOpInputNode(cnode, metaGraphT.get(), node.get());
SetOpOutputNode(outputs, metaGraphT.get(), node.get());
// add quant param
node->quantType = primitiveT_value->GetQuantType();
if (node->quantType == schema::QuantType_PostTraining) {
MS_LOG(INFO) << "node: " << node->name << " add QuantParam";
// activation
auto activate_index = node->inputIndex[0];
auto tensor_input = metaGraphT->allTensors[activate_index].get();
auto input_quant_params = primitiveT_value->GetInputQuantParams();
if (input_quant_params.empty()) {
MS_LOG(WARNING) << "node: " << node->name << " input quant params is empty";
continue;
}
std::unique_ptr<schema::QuantParamT> input_quant_param =
std::make_unique<schema::QuantParamT>(input_quant_params[0]);
tensor_input->quantParams.emplace_back(std::move(input_quant_param));
// output
auto output_index = node->outputIndex[0];
auto tensor_output = metaGraphT->allTensors[output_index].get();
auto output_quant_params = primitiveT_value->GetOutputQuantParams();
if (output_quant_params.empty()) {
MS_LOG(WARNING) << "node: " << node->name << " output quant params is empty";
continue;
}
std::unique_ptr<schema::QuantParamT> output_quant_param =
std::make_unique<schema::QuantParamT>(output_quant_params[0]);
tensor_output->quantParams.emplace_back(std::move(output_quant_param));
// // TensorType
// valuePtr = primitive->GetAttr(kInputTensorDataType);
// if (valuePtr != nullptr) {
// MS_LOG(INFO) << "node: " << node->name << " input tensor data type: " << GetValue<int>(valuePtr);
// for (auto input : node->inputIndex) {
// auto tensor = subGraph->allTensors[input].get();
// tensor->dataType = kNumberTypeUInt8;
// }
// }
}
metaGraphT->nodes.emplace_back(std::move(node));
}
// set graph input tensors
for (auto node : graphInputNodes) {
for (auto input : node->inputIndex) {
auto tensor = metaGraphT->allTensors[input].get();
if (tensor->data.empty()) {
tensor->nodeType = schema::NodeType_ValueNode;
// tensor->refCount = lite::MSCONST_WEIGHT_REFCOUNT;
metaGraphT->inputIndex.emplace_back(input);
}
}
}
return metaGraphT.release();
}
void AnfExporter::SetOpInputNode(const CNodePtr &cnode, schema::MetaGraphT *meta_graph, schema::CNodeT *fbNode) {
MS_ASSERT(nullptr != meta_graph);
MS_ASSERT(nullptr != fbNode);
if (cnode->inputs().size() <= 1) {
return;
}
std::string cNodeName = cnode->fullname_with_scope();
bool isGraphInput = true;
for (int i = 1; i < static_cast<int>(cnode->inputs().size()); i++) {
auto inputNode = cnode->input(i);
if (inputNode->isa<CNode>()) {
isGraphInput = false;
std::string inputName = inputNode->fullname_with_scope();
if (nodeIdMap.find(inputName) != nodeIdMap.end()) {
fbNode->inputIndex.emplace_back(nodeIdMap[inputName]);
}
} else if (inputNode->isa<Parameter>()) {
auto paramNode = inputNode->cast<ParameterPtr>();
if (paramNode->name().empty()) {
paramNode->set_name(cNodeName + "_i:" + std::to_string(i - 1));
}
if (nodeIdMap.find(paramNode->name()) != nodeIdMap.end()) {
fbNode->inputIndex.emplace_back(nodeIdMap[paramNode->name()]);
continue;
}
auto paramTensor = std::make_unique<schema::TensorT>();
auto abstractBase = paramNode->abstract();
if (abstractBase == nullptr) {
MS_LOG(ERROR) << "Abstract of parameter is nullptr, " << paramNode->name();
MS_ASSERT(false);
return;
}
if (!utils::isa<abstract::AbstractTensorPtr>(abstractBase)) {
MS_LOG(ERROR) << "Abstract of parameter should be anstract tensor, " << paramNode->name();
MS_ASSERT(false);
return;
}
auto abstractTensor = utils::cast<abstract::AbstractTensorPtr>(abstractBase);
auto typePtr = abstractTensor->element()->GetTypeTrack();
MS_ASSERT(typePtr != nullptr);
paramTensor->dataType = typePtr->type_id();
if (!utils::isa<abstract::ShapePtr>(abstractTensor->BuildShape())) {
MS_LOG(ERROR) << "Shape of Abstract of parameter should be ShapePtr, " << paramNode->name();
MS_ASSERT(false);
return;
}
paramTensor->dims = utils::cast<abstract::ShapePtr>(abstractTensor->BuildShape())->shape();
auto paramValue = std::dynamic_pointer_cast<ParamValueLite>(paramNode->default_param());
if (paramValue != nullptr) {
paramTensor->nodeType = schema::NodeType_ValueNode;
paramTensor->data.resize(paramValue->tensor_size());
memcpy(paramTensor->data.data(), paramValue->tensor_addr(), paramValue->tensor_size());
}
// for (auto &ite : paramValue->quant_param()) {
// auto quantPar = std::make_unique<schema::QuantParamT>();
// quantPar->scale = ite->scale;
// quantPar->zeroPoint = ite->zeroPoint;
// quantPar->min = ite->min;
// quantPar->max = ite->max;
// quantPar->narrowRange = ite->narrowRange;
// quantPar->inited = ite->inited;
// quantPar->numBits = ite->numBits;
// paramTensor->quantParams.emplace_back(std::move(quantPar));
// }
nodeIdMap[paramNode->fullname_with_scope()] = meta_graph->allTensors.size();
fbNode->inputIndex.emplace_back(meta_graph->allTensors.size());
meta_graph->allTensors.emplace_back(std::move(paramTensor));
}
}
if (isGraphInput) {
graphInputNodes.emplace_back(fbNode);
}
}
void AnfExporter::SetOpOutputNode(const std::vector<schema::TensorT *> &outputTensors, schema::MetaGraphT *graph,
schema::CNodeT *cnode) {
MS_ASSERT(nullptr != graph);
MS_ASSERT(nullptr != cnode);
std::string cnodeName = cnode->name;
if (!outputTensors.empty()) {
int i = 0;
for (auto outputTensor : outputTensors) {
std::string name = cnodeName + "_o:" + std::to_string(i);
nodeIdMap[name] = graph->allTensors.size();
cnode->outputIndex.emplace_back(graph->allTensors.size());
graph->allTensors.emplace_back(outputTensor);
i++;
}
return;
}
auto msTensor = new schema::TensorT();
msTensor->nodeType = schema::NodeType_Parameter;
cnode->outputIndex.emplace_back(graph->allTensors.size());
nodeIdMap[cnodeName] = graph->allTensors.size();
graph->allTensors.emplace_back(msTensor);
}
schema::MetaGraphT *Export(const FuncGraphPtr &funcGraph) {
AnfExporter anfExporter;
return anfExporter.Export(funcGraph);
}
} // namespace mindspore::lite
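A hedged caller-side sketch for the free Export() function defined above: it returns a raw MetaGraphT released from an internal unique_ptr, so the caller takes ownership. The wrapper below is illustrative only, not part of this commit.

#include <memory>
#include "src/common/anf_exporter/anf_exporter.h"

// Hypothetical wrapper, for illustration only.
std::unique_ptr<mindspore::schema::MetaGraphT> ExportToMetaGraph(const mindspore::FuncGraphPtr &func_graph) {
  auto *meta_graph = mindspore::lite::Export(func_graph);  // nullptr on failure
  return std::unique_ptr<mindspore::schema::MetaGraphT>(meta_graph);
}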


@ -0,0 +1,46 @@
/**
* This is the C++ adaptation and derivative work of Myia (https://github.com/mila-iqia/myia/).
*
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_SRC_ANF_EXPORTER_ANF_EXPORTER_H_
#define MINDSPORE_LITE_SRC_ANF_EXPORTER_ANF_EXPORTER_H_
#include <map>
#include <string>
#include <vector>
#include "schema/inner/model_generated.h"
#include "ir/func_graph.h"
namespace mindspore::lite {
class AnfExporter {
public:
AnfExporter() = default;
virtual ~AnfExporter() = default;
schema::MetaGraphT *Export(const FuncGraphPtr &funcGraph);
void SetOpOutputNode(const std::vector<schema::TensorT *> &outputTensors, schema::MetaGraphT *graph,
schema::CNodeT *cnode);
void SetOpInputNode(const CNodePtr &cnode, schema::MetaGraphT *meta_graph, schema::CNodeT *fbNode);
private:
std::map<std::string, int> nodeIdMap;
std::vector<schema::CNodeT *> graphInputNodes;
};
schema::MetaGraphT *Export(const FuncGraphPtr &funcGraph);
} // namespace mindspore::lite
#endif // MINDSPORE_LITE_SRC_ANF_EXPORTER_ANF_EXPORTER_H_


@ -0,0 +1,42 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_activation_populater.h"
#include <vector>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfActivationPopulater::Parse(mindspore::CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto p = GetCNodePrimitive(cnodePtr);
auto attr = std::make_unique<schema::ActivationT>();
if (p->name() == "ReLU") {
attr->type = schema::ActivationType_RELU;
} else if (p->name() == "Sigmoid") {
attr->type = schema::ActivationType_SIGMOID;
}
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_Activation;
node->primitive->value.value = attr.release();
return 0;
}
AnfNodePopulaterRegistrar anfReLUParser("ReLU", new AnfActivationPopulater());
AnfNodePopulaterRegistrar anfSigmoidParser("Sigmoid", new AnfActivationPopulater());
} // namespace mindspore::lite
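The same Parse-and-register pattern extends to other ops in the schema. As a hedged illustration (not part of this commit), a populater for the parameter-free Neg table could look like this; "Neg", schema::NegT and PrimitiveType_Neg come from the ops schema above:

#include <memory>
#include <vector>
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"

namespace mindspore::lite {
// Hypothetical populater for the Neg op, following the pattern above.
class AnfNegPopulater : public AnfNodePopulater {
 public:
  int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override {
    auto attr = std::make_unique<schema::NegT>();
    node->nodeType = schema::NodeType_CNode;
    node->primitive = std::make_unique<schema::PrimitiveT>();
    node->primitive->value.type = schema::PrimitiveType_Neg;
    node->primitive->value.value = attr.release();
    return 0;
  }
};
AnfNodePopulaterRegistrar anfNegParser("Neg", new AnfNegPopulater());
}  // namespace mindspore::lite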


@ -0,0 +1,30 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_ACTIVATION_PARSER_H
#define MINDSPORE_ANF_ACTIVATION_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfActivationPopulater : public AnfNodePopulater {
public:
AnfActivationPopulater() = default;
~AnfActivationPopulater() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_ACTIVATION_PARSER_H


@ -0,0 +1,37 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_batchnorm_populater.h"
#include <vector>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfBatchnormParser::Parse(mindspore::CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto p = GetCNodePrimitive(cnodePtr);
auto attr = std::make_unique<schema::FusedBatchNormT>();
attr->epsilon = GetValue<float>(p->GetAttr("epsilon"));
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_FusedBatchNorm;
node->primitive->value.value = attr.release();
return 0;
}
AnfNodePopulaterRegistrar anfBatchnormParser("BatchNorm", new AnfBatchnormParser());
} // namespace mindspore::lite


@ -0,0 +1,29 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_BATCHNORM_PARSER_H
#define MINDSPORE_ANF_BATCHNORM_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfBatchnormParser : public AnfNodePopulater {
public:
AnfBatchnormParser() = default;
~AnfBatchnormParser() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_BATCHNORM_PARSER_H


@ -0,0 +1,37 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_biasadd_populater.h"
#include <vector>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfBiasAddPopulater::Parse(mindspore::CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto attr = std::make_unique<schema::BiasAddT>();
attr->axis = {0};
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_BiasAdd;
node->primitive->value.value = attr.release();
return 0;
}
AnfNodePopulaterRegistrar anfBiasAddParser("BiasAdd", new AnfBiasAddPopulater());
} // namespace mindspore::lite


@ -0,0 +1,29 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_BIASADD_PARSER_H
#define MINDSPORE_ANF_BIASADD_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfBiasAddPopulater : public AnfNodePopulater {
public:
AnfBiasAddPopulater() = default;
~AnfBiasAddPopulater() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_BIASADD_PARSER_H


@ -0,0 +1,121 @@
/**
* This is the C++ adaptation and derivative work of Myia (https://github.com/mila-iqia/myia/).
*
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_conv_populater.h"
#include <string>
#include <vector>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfConvPopulater::Parse(mindspore::CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto p = GetCNodePrimitive(cnodePtr);
int group = GetValue<int>(p->GetAttr("group"));
if (group > 1) {
auto attr = std::make_unique<schema::DepthwiseConv2DT>();
auto format = GetValue<std::string>(p->GetAttr("data_format"));
if (format == "NCHW") {
attr->format = schema::Format_NCHW;
} else if (format == "NHWC") {
attr->format = schema::Format_NHWC;
} else {
attr->format = schema::Format_NUM_OF_FORMAT;
}
auto pad_list = GetValue<std::vector<int>>(p->GetAttr("pad_list"));
attr->padUp = pad_list[0];
attr->padDown = pad_list[1];
attr->padLeft = pad_list[2];
attr->padRight = pad_list[3];
auto dilation = GetValue<std::vector<int>>(p->GetAttr("dilation"));
attr->dilateH = dilation[0];
attr->dilateW = dilation[1];
auto kernel_size = GetValue<std::vector<int>>(p->GetAttr("kernel_size"));
attr->kernelH = kernel_size[0];
attr->kernelW = kernel_size[1];
auto stride = GetValue<std::vector<int>>(p->GetAttr("stride"));
attr->strideH = stride[2];
attr->strideW = stride[3];
auto pad_mode = GetValue<std::string>(p->GetAttr("pad_mode"));
if (pad_mode == "valid") {
attr->padMode = schema::PadMode_VALID;
} else if (pad_mode == "same") {
attr->padMode = schema::PadMode_SAME;
} else {
attr->padMode = schema::PadMode_NOTSET;
}
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_DepthwiseConv2D;
node->primitive->value.value = attr.release();
} else {
auto attr = std::make_unique<schema::Conv2DT>();
attr->group = group;
auto format = GetValue<std::string>(p->GetAttr("data_format"));
if (format == "NCHW") {
attr->format = schema::Format_NCHW;
} else if (format == "NHWC") {
attr->format = schema::Format_NHWC;
} else {
attr->format = schema::Format_NUM_OF_FORMAT;
}
auto pad_list = GetValue<std::vector<int>>(p->GetAttr("pad_list"));
attr->padUp = pad_list[0];
attr->padDown = pad_list[1];
attr->padLeft = pad_list[2];
attr->padRight = pad_list[3];
auto dilation = GetValue<std::vector<int>>(p->GetAttr("dilation"));
attr->dilateH = dilation[0];
attr->dilateW = dilation[1];
auto kernel_size = GetValue<std::vector<int>>(p->GetAttr("kernel_size"));
attr->kernelH = kernel_size[0];
attr->kernelW = kernel_size[1];
auto stride = GetValue<std::vector<int>>(p->GetAttr("stride"));
attr->strideH = stride[2];
attr->strideW = stride[3];
attr->channelOut = GetValue<int>(p->GetAttr("out_channel"));
auto pad_mode = GetValue<std::string>(p->GetAttr("pad_mode"));
if (pad_mode == "valid") {
attr->padMode = schema::PadMode_VALID;
} else if (pad_mode == "same") {
attr->padMode = schema::PadMode_SAME;
} else {
attr->padMode = schema::PadMode_NOTSET;
}
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_Conv2D;
node->primitive->value.value = attr.release();
}
return 0;
}
AnfNodePopulaterRegistrar anfConvParser("Conv2D", new AnfConvPopulater());
} // namespace mindspore::lite


@ -0,0 +1,32 @@
/**
* This is the C++ adaptation and derivative work of Myia (https://github.com/mila-iqia/myia/).
*
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_CONV_PARSER_H
#define MINDSPORE_ANF_CONV_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfConvPopulater : public AnfNodePopulater {
public:
AnfConvPopulater() = default;
~AnfConvPopulater() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_CONV_PARSER_H


@ -0,0 +1,35 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_flatten_populater.h"
#include <vector>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfFlattenPopulater::Parse(mindspore::CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto attr = std::make_unique<schema::FlattenT>();
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_Flatten;
node->primitive->value.value = attr.release();
return 0;
}
AnfNodePopulaterRegistrar anfFlattenParser("Flatten", new AnfFlattenPopulater());
} // namespace mindspore::lite


@ -0,0 +1,29 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_FLATTEN_PARSER_H
#define MINDSPORE_ANF_FLATTEN_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfFlattenPopulater : public AnfNodePopulater {
public:
AnfFlattenPopulater() = default;
~AnfFlattenPopulater() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_FLATTEN_PARSER_H


@ -0,0 +1,38 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_matmul_populater.h"
#include <vector>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfMatmulPopulater::Parse(mindspore::CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto p = GetCNodePrimitive(cnodePtr);
auto attr = std::make_unique<schema::MatMulT>();
attr->transposeA = GetValue<bool>(p->GetAttr("transpose_a"));
attr->transposeB = GetValue<bool>(p->GetAttr("transpose_b"));
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_MatMul;
node->primitive->value.value = attr.release();
return 0;
}
AnfNodePopulaterRegistrar anfMatmulParser("Matmul", new AnfMatmulPopulater());
} // namespace mindspore::lite


@ -0,0 +1,29 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_MATMUL_PARSER_H
#define MINDSPORE_ANF_MATMUL_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfMatmulPopulater : public AnfNodePopulater {
public:
AnfMatmulPopulater() = default;
~AnfMatmulPopulater() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_MATMUL_PARSER_H

View File

@ -0,0 +1,35 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_mul_populater.h"
#include <vector>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfMulPopulater::Parse(mindspore::CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto attr = std::make_unique<schema::MulT>();
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_Mul;
node->primitive->value.value = attr.release();
return 0;
}
AnfNodePopulaterRegistrar anfMulParser("Mul", new AnfMulPopulater());
} // namespace mindspore::lite

View File

@ -0,0 +1,29 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_MUL_PARSER_H
#define MINDSPORE_ANF_MUL_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfMulPopulater : public AnfNodePopulater {
public:
AnfMulPopulater() = default;
~AnfMulPopulater() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_MUL_PARSER_H

View File

@ -0,0 +1,19 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
namespace mindspore::lite {} // namespace mindspore::lite

View File

@ -0,0 +1,33 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_NODE_PARSER_H
#define MINDSPORE_ANF_NODE_PARSER_H
#include <vector>
#include "ir/anf.h"
#include "schema/inner/model_generated.h"
namespace mindspore::lite {
class AnfNodePopulater {
public:
AnfNodePopulater() = default;
virtual ~AnfNodePopulater() = default;
virtual int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) = 0;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_NODE_PARSER_H

View File

@ -0,0 +1,48 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include <string>
#include "src/common/anf_exporter/anf_populater/anf_biasadd_populater.h"
#include "src/common/anf_exporter/anf_populater/anf_conv_populater.h"
#include "src/common/anf_exporter/anf_populater/anf_matmul_populater.h"
#include "src/common/anf_exporter/anf_populater/anf_pool_populater.h"
#include "src/common/anf_exporter/anf_populater/anf_activation_populater.h"
#include "src/common/anf_exporter/anf_populater/anf_flatten_populater.h"
namespace mindspore {
namespace lite {
AnfNodePopulaterRegistry *AnfNodePopulaterRegistry::GetInstance() {
static AnfNodePopulaterRegistry instance;
instance.SetNodePopulater("BiasAdd", new AnfBiasAddPopulater());
instance.SetNodePopulater("Conv2D", new AnfConvPopulater());
instance.SetNodePopulater("MatMul", new AnfMatmulPopulater());
instance.SetNodePopulater("MaxPool", new AnfPoolPopulater());
instance.SetNodePopulater("ReLU", new AnfActivationPopulater());
instance.SetNodePopulater("Flatten", new AnfFlattenPopulater());
return &instance;
}
AnfNodePopulater *AnfNodePopulaterRegistry::GetNodePopulater(const std::string &name) {
if (parsers.find(name) == parsers.end()) {
return nullptr;
}
return parsers[name];
}
void AnfNodePopulaterRegistry::SetNodePopulater(const std::string &name, AnfNodePopulater *parser) {
parsers[name] = parser;
}
} // namespace lite
} // namespace mindspore
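// Illustration (hedged sketch, not part of this change): how an exporter is expected to consume
// the registry -- look up a populater by primitive name and let it fill the schema node. The
// "Conv2D" key and the PopulateExample wrapper are assumptions made for the example only.
#if 0
int PopulateExample(CNodePtr cnode, schema::CNodeT *schema_node) {
  auto *populater = AnfNodePopulaterRegistry::GetInstance()->GetNodePopulater("Conv2D");
  if (populater == nullptr) {
    return -1;  // no populater registered under this name
  }
  std::vector<schema::TensorT *> outputs;
  return populater->Parse(cnode, schema_node, &outputs);  // fills nodeType and primitive
}
#endif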

View File

@ -0,0 +1,43 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_NODE_PARSER_REGISTRY_H
#define MINDSPORE_ANF_NODE_PARSER_REGISTRY_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <unordered_map>
#include <string>
namespace mindspore::lite {
class AnfNodePopulaterRegistry {
public:
AnfNodePopulaterRegistry() = default;
virtual ~AnfNodePopulaterRegistry() = default;
static AnfNodePopulaterRegistry *GetInstance();
AnfNodePopulater *GetNodePopulater(const std::string &name);
void SetNodePopulater(const std::string &name, AnfNodePopulater *parser);
private:
std::unordered_map<std::string, AnfNodePopulater *> parsers;
};
class AnfNodePopulaterRegistrar {
public:
AnfNodePopulaterRegistrar(const std::string &name, AnfNodePopulater *parser) {
AnfNodePopulaterRegistry::GetInstance()->SetNodePopulater(name, parser);
}
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_NODE_PARSER_REGISTRY_H
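// Hypothetical registration sketch ("MyOp" and AnfMyOpPopulater are illustrative, not part of
// this change): a file-scope AnfNodePopulaterRegistrar registers the populater during static
// initialization, the same mechanism the registrar objects in the populater .cc files rely on.
#if 0
class AnfMyOpPopulater : public AnfNodePopulater {
 public:
  int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override {
    node->nodeType = schema::NodeType_CNode;
    node->primitive = std::make_unique<schema::PrimitiveT>();
    return 0;
  }
};
static AnfNodePopulaterRegistrar anfMyOpRegistrar("MyOp", new AnfMyOpPopulater());
#endif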

View File

@ -0,0 +1,68 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_pool_populater.h"
#include <vector>
#include <string>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfPoolPopulater::Parse(mindspore::CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto p = GetCNodePrimitive(cnodePtr);
auto attr = std::make_unique<schema::PoolingT>();
if (p->instance_name() == "MaxPool") {
attr->poolingMode = schema::PoolMode_MAX_POOLING;
} else if (p->instance_name() == "MeanPool") {
attr->poolingMode = schema::PoolMode_MEAN_POOLING;
}
auto format = GetValue<std::string>(p->GetAttr("data_format"));
if (format == "NCHW") {
attr->format = schema::Format_NCHW;
} else if (format == "NHWC") {
attr->format = schema::Format_NHWC;
} else {
attr->format = schema::Format_NUM_OF_FORMAT;
}
auto pad_mode = GetValue<std::string>(p->GetAttr("padding"));
if (pad_mode == "VALID") {
attr->padMode = schema::PadMode_VALID;
} else if (pad_mode == "SAME") {
attr->padMode = schema::PadMode_SAME;
} else {
attr->padMode = schema::PadMode_NOTSET;
}
auto kernel_size = GetValue<std::vector<int>>(p->GetAttr("ksize"));
attr->windowH = kernel_size[2];
attr->windowW = kernel_size[3];
auto stride = GetValue<std::vector<int>>(p->GetAttr("strides"));
attr->strideH = stride[2];
attr->strideW = stride[3];
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_Pooling;
node->primitive->value.value = attr.release();
return 0;
}
AnfNodePopulaterRegistrar anfMaxPoolParser("MaxPool", new AnfPoolPopulater());
} // namespace mindspore::lite
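// Note on the index arithmetic above (assumption: the pooling attributes follow the
// 4-element NCHW-style layout used by MindSpore's MaxPool primitive):
//   ksize   = {1, 1, kernel_h, kernel_w}  -> windowH = ksize[2],   windowW = ksize[3]
//   strides = {1, 1, stride_h, stride_w}  -> strideH = strides[2], strideW = strides[3]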

View File

@ -0,0 +1,29 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_POOL_PARSER_H
#define MINDSPORE_ANF_POOL_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfPoolPopulater : public AnfNodePopulater {
public:
AnfPoolPopulater() = default;
~AnfPoolPopulater() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_POOL_PARSER_H

View File

@ -0,0 +1,40 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_reducemean_populater.h"
#include <vector>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfReduceMeanPopulater::Parse(CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto p = GetCNodePrimitive(cnodePtr);
auto attr = std::make_unique<schema::ReduceT>();
attr->mode = schema::ReduceMode_ReduceMean;
attr->keepDims = GetValue<bool>(p->GetAttr("keep_dims"));
// attr->axes = GetValue<std::vector<int>>(p->GetAttr("shape"));
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_Reduce;
node->primitive->value.value = attr.release();
return 0;
}
AnfNodePopulaterRegistrar anfReduceMeanParser("ReduceMean", new AnfReduceMeanPopulater());
} // namespace mindspore::lite

View File

@ -0,0 +1,29 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_REDUCEMEAN_PARSER_H
#define MINDSPORE_ANF_REDUCEMEAN_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfReduceMeanPopulater : public AnfNodePopulater {
public:
AnfReduceMeanPopulater() = default;
~AnfReduceMeanPopulater() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_REDUCEMEAN_PARSER_H

View File

@ -0,0 +1,34 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_tensoradd_populater.h"
#include <vector>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfTensorAddPopulater::Parse(mindspore::CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto attr = std::make_unique<schema::AddT>();
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_Add;
node->primitive->value.value = attr.release();
return 0;
}
AnfNodePopulaterRegistrar anfTensorAddParser("TensorAdd", new AnfTensorAddPopulater());
} // namespace mindspore::lite

View File

@ -0,0 +1,29 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_TENSORADD_PARSER_H
#define MINDSPORE_ANF_TENSORADD_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfTensorAddPopulater : public AnfNodePopulater {
public:
AnfTensorAddPopulater() = default;
~AnfTensorAddPopulater() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_TENSORADD_PARSER_H

View File

@ -0,0 +1,34 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_exporter/anf_populater/anf_tuple_getitem_populater.h"
#include <vector>
#include <memory>
#include "src/common/anf_exporter/anf_populater/anf_node_populater_registry.h"
#include "ir/func_graph.h"
#include "ir/primitive.h"
namespace mindspore::lite {
int mindspore::lite::AnfTupleGetItemPopulater::Parse(CNodePtr cnodePtr, schema::CNodeT *node,
std::vector<schema::TensorT *> *outputs) {
auto attr = std::make_unique<schema::TupleGetItemT>();
node->nodeType = schema::NodeType_CNode;
node->primitive = std::make_unique<schema::PrimitiveT>();
node->primitive->value.type = schema::PrimitiveType_TupleGetItem;
node->primitive->value.value = attr.release();
return 0;
}
AnfNodePopulaterRegistrar anfTupleGetItemParser("tuple_getitem", new AnfTupleGetItemPopulater());
} // namespace mindspore::lite

View File

@ -0,0 +1,29 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_ANF_TUPLE_GETITEM_PARSER_H
#define MINDSPORE_ANF_TUPLE_GETITEM_PARSER_H
#include "src/common/anf_exporter/anf_populater/anf_node_populater.h"
#include <vector>
namespace mindspore::lite {
class AnfTupleGetItemPopulater : public AnfNodePopulater {
public:
AnfTupleGetItemPopulater() = default;
~AnfTupleGetItemPopulater() override = default;
int Parse(CNodePtr cnodePtr, schema::CNodeT *node, std::vector<schema::TensorT *> *outputs) override;
};
} // namespace mindspore::lite
#endif // MINDSPORE_ANF_TUPLE_GETITEM_PARSER_H

View File

@ -0,0 +1,184 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <utility>
#include <vector>
#include <string>
#include <memory>
#include "src/common/anf_importer/anf_importer.h"
#include "schema/model_generated.h"
#include "ir/dtype.h"
#include "ir/primitive.h"
#include "src/param_value_lite.h"
#include "frontend/operator/ops.h"
#include "abstract/abstract_value.h"
#include "src/ir/primitive_value.h"
#include "include/errorcode.h"
namespace mindspore {
namespace lite {
#if 0
PrimitivePtr SetConv2DAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
auto attrs = cNode->primitive()->value_as_Conv2D();
PrimitivePtr prim;
if (attrs->group() > 1) {
prim = std::make_shared<Primitive>("DepthwiseConv2D");
prim->set_instance_name("DepthwiseConv2D");
} else {
prim = std::make_shared<Primitive>("Conv2D");
prim->set_instance_name("Conv2D");
}
prim->set_attr("group", MakeValue<int>(attrs->group()));
prim->set_attr("format", MakeValue<int>(attrs->format()));
prim->set_attr("pad_mode", MakeValue<int>(attrs->padMode()));
std::vector<int> pad_list = {attrs->padUp(), attrs->padDown(), attrs->padLeft(), attrs->padRight()};
prim->set_attr("pad_list", MakeValue<std::vector<int>>(pad_list));
std::vector<int> dilate = {attrs->dilateH(), attrs->dilateW()};
prim->set_attr("dilation", MakeValue<std::vector<int>>(dilate));
std::vector<int> kernel_size = {attrs->kernelH(), attrs->kernelW()};
prim->set_attr("kernel_size", MakeValue<std::vector<int>>(kernel_size));
std::vector<int> stride = {1, 1, attrs->strideH(), attrs->strideW()};
prim->set_attr("stride", MakeValue<std::vector<int>>(stride));
prim->set_attr("out_channel", MakeValue<int>(attrs->channelOut()));
prim->set_attr("group", MakeValue<int>(attrs->group()));
return prim;
}
PrimitivePtr SetActivationAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
auto attrs = cNode->primitive()->value_as_Activation();
PrimitivePtr prim;
if (attrs->type() == schema::ActivationType_RELU) {
prim = std::make_shared<Primitive>("ReLU");
prim->set_instance_name("ReLU");
}
return prim;
}
PrimitivePtr SetPoolingAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
auto attrs = cNode->primitive()->value_as_Pooling();
PrimitivePtr prim;
if (attrs->poolingMode() == schema::PoolMode_MAX_POOLING) {
prim = std::make_shared<Primitive>("MaxPool");
prim->set_instance_name("MaxPool");
} else if (attrs->poolingMode() == schema::PoolMode_MEAN_POOLING) {
prim = std::make_shared<Primitive>("MeanPool");
prim->set_instance_name("MeanPool");
}
prim->set_attr("format", MakeValue<int>(attrs->format()));
prim->set_attr("pad_mode", MakeValue<int>(attrs->padMode()));
prim->set_attr("ksize", MakeValue<std::vector<int>>(std::vector<int>({1, 1, attrs->windowH(), attrs->windowW()})));
prim->set_attr("strides", MakeValue<std::vector<int>>(std::vector<int>({1, 1, attrs->strideH(), attrs->strideW()})));
return prim;
}
PrimitivePtr SetFlattenAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
auto prim = std::make_shared<Primitive>("Flatten");
prim->set_instance_name("Flatten");
return prim;
}
PrimitivePtr SetMatmulAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
auto attrs = cNode->primitive()->value_as_MatMul();
auto prim = std::make_shared<Primitive>("Matmul");
prim->set_instance_name("Matmul");
prim->set_attr("transpose_a", MakeValue<int>(attrs->transposeA()));
prim->set_attr("transpose_b", MakeValue<int>(attrs->transposeB()));
return prim;
}
PrimitivePtr SetMulAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
// auto attrs = nodedef->attr_as_Mul();
auto prim = std::make_shared<Primitive>("Mul");
prim->set_instance_name("Mul");
return prim;
}
PrimitivePtr SetSigmoidAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
auto prim = std::make_shared<Primitive>("Sigmoid");
prim->set_instance_name("Sigmoid");
return prim;
}
PrimitivePtr SetReduceAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
auto prim = std::make_shared<Primitive>("ReduceMean");
prim->set_instance_name("ReduceMean");
return prim;
}
PrimitivePtr SetBatchNormAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
auto attrs = cNode->primitive_as_BatchNorm();
auto prim = std::make_shared<Primitive>("BatchNorm");
prim->set_attr("is_training", MakeValue<bool>(attrs->is_training()));
prim->set_instance_name("BatchNorm");
return prim;
}
PrimitivePtr SetBiasAddAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
auto prim = std::make_shared<Primitive>("BiasAdd");
prim->set_instance_name("BiasAdd");
return prim;
}
PrimitivePtr SetAddAttr(const schema::CNode *cNode) {
MS_EXCEPTION_IF_NULL(cNode);
auto prim = std::make_shared<Primitive>("Add");
prim->set_instance_name("Add");
return prim;
}
void MinnieBuildGraph::FbTest(const GraphDef *graph_def) {
auto node_def = graph_def->subgraphs()->begin()->nodes()->GetAs<OpDef>(3);
PrimitivePtr prim = ConverterOperatorAttr(node_def);
if (prim->GetAttr("format")) MS_LOG(INFO) << "find format";
if (prim->GetAttr("group")) MS_LOG(INFO) << "find group";
}
#endif
int AnfImporter::Import() {
ConverterConstTensor();
auto ret = ConverterCNode();
if (RET_OK != ret) {
MS_LOG(ERROR) << "ConverterCNode failed " << ret;
return ret;
}
AddReturnCNode();
return RET_OK;
}
AnfNodePtr AnfImporter::GetNode(int tensor_id) {
auto n = nodes_.find(tensor_id);
if (n == nodes_.end()) {
return nullptr;
}
return n->second;
}
void AnfImporter::AddNode(int tensor_id, AnfNodePtr node) { nodes_[tensor_id] = std::move(node); }
} // namespace lite
} // namespace mindspore

View File

@ -0,0 +1,54 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_SRC_ANF_IMPORTER_ANF_IMPORTER_H_
#define MINDSPORE_LITE_SRC_ANF_IMPORTER_ANF_IMPORTER_H_
#include <unordered_map>
#include "ir/func_graph.h"
#include "ir/anf.h"
#include "base/base.h"
namespace mindspore::lite {
class AnfImporter {
public:
AnfImporter() = default;
virtual ~AnfImporter() = default;
virtual int Import();
virtual FuncGraphPtr GetResult() = 0;
protected:
// convert const tensor into parameter and save in nodes_
virtual void ConverterConstTensor() = 0;
// convert other node into cnode and save in nodes_
virtual int ConverterCNode() = 0;
virtual void AddReturnCNode() = 0;
AnfNodePtr GetNode(int tensor_id);
void AddNode(int tensor_id, AnfNodePtr node);
protected:
std::unordered_map<int, AnfNodePtr> nodes_;
};
} // namespace mindspore::lite
#endif // MINDSPORE_LITE_SRC_ANF_IMPORTER_ANF_IMPORTER_H_
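// Orientation sketch (DummyImporter is purely illustrative, not part of this change):
// Import() is a template method that runs the three protected hooks in order; a concrete
// importer only has to fill nodes_ via AddNode() so later steps can resolve inputs with GetNode().
#if 0
class DummyImporter : public AnfImporter {
 public:
  FuncGraphPtr GetResult() override { return nullptr; }

 protected:
  void ConverterConstTensor() override {}      // 1) const tensors -> Parameter nodes, AddNode(i, parameter)
  int ConverterCNode() override { return 0; }  // 2) ops -> CNodes, inputs resolved via GetNode()
  void AddReturnCNode() override {}            // 3) append the Return (and MakeTuple) node
};
#endif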

View File

@ -0,0 +1,122 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_importer/import_from_meta_graph.h"
#include <string>
#include <vector>
#include <memory>
#include "frontend/operator/ops.h"
#include "src/param_value_lite.h"
#include "utils/log_adapter.h"
#include "abstract/abstract_value.h"
#include "src/ir/primitive_value.h"
#include "include/errorcode.h"
namespace mindspore::lite {
void AnfImporterFromMetaGraph::ConverterConstTensor() {
MS_EXCEPTION_IF_NULL(model);
auto *meta_graph = model->GetMetaGraph();
MS_EXCEPTION_IF_NULL(meta_graph);
for (size_t i = 0; i < meta_graph->allTensors()->size(); i++) {
auto *tensor = meta_graph->allTensors()->GetAs<schema::Tensor>(i);
MS_EXCEPTION_IF_NULL(tensor);
if (tensor->nodeType() != schema::NodeType_ValueNode) {
continue;
}
MS_ASSERT(tensor->dims() != nullptr);
auto parameter = model->add_parameter();
std::vector<int> shape;
for (size_t j = 0; j < tensor->dims()->size(); ++j) {
shape.push_back(tensor->dims()->data()[j]);
}
auto type_id = static_cast<TypeId>(tensor->dataType());
auto type_ptr = TypeIdToType(type_id);
auto abstract_tensor = std::make_shared<abstract::AbstractTensor>(type_ptr, shape);
parameter->set_abstract(abstract_tensor);
ParamValueLitePtr param_value = std::make_shared<ParamValueLite>();
MS_EXCEPTION_IF_NULL(param_value);
param_value->set_tensor_shape(shape);
param_value->set_tensor_type(type_id);
if (tensor->data() != nullptr) {
auto size = tensor->data()->size();
char *tensor_data = new char[size]();
MS_EXCEPTION_IF_NULL(tensor_data);
std::memcpy(tensor_data, tensor->data()->data(), size);
param_value->set_tensor_addr(tensor_data);
param_value->set_tensor_size(size);
}
parameter->set_default_param(param_value);
AddNode(i, parameter);
}
}
int AnfImporterFromMetaGraph::ConverterCNode() {
MS_EXCEPTION_IF_NULL(model);
auto *meta_graph = model->GetMetaGraph();
MS_EXCEPTION_IF_NULL(meta_graph);
auto cNodes = meta_graph->nodes();
for (size_t i = 0; i < cNodes->size(); i++) {
auto cNode = cNodes->GetAs<schema::CNode>(i);
MS_EXCEPTION_IF_NULL(cNode);
auto tensor_id = cNode->outputIndex()->data()[0];
if (GetNode(tensor_id)) {
continue;
}
auto prim = std::make_shared<PrimitiveValue>(model->GetOp(cNode->name()->str()));
if (prim == nullptr) {
MS_LOG(ERROR) << "th tensorDef in subGraphDef is nullptr";
return RET_ERROR;
}
auto value_node = NewValueNode(prim);
AddNode(tensor_id, value_node);
std::vector<AnfNodePtr> op_inputs = {value_node};
MS_EXCEPTION_IF_NULL(cNode->inputIndex());
for (size_t j = 0; j < cNode->inputIndex()->size(); j++) {
auto node = GetNode(*(cNode->inputIndex()->GetAs<uint32_t>(j)));
if (nullptr == node) {
MS_LOG(ERROR) << "Can't find input node.";
return RET_ERROR;
}
// todo: CheckInputNodeType, the first node should be op;
op_inputs.push_back(node);
}
auto cnode = model->NewCNode(op_inputs);
auto node_name = std::string(cNode->name()->c_str());
cnode->set_fullname_with_scope(node_name);
AddNode(tensor_id, cnode);
}
return RET_OK;
}
void AnfImporterFromMetaGraph::AddReturnCNode() {
MS_EXCEPTION_IF_NULL(model);
auto *meta_graph = model->GetMetaGraph();
MS_EXCEPTION_IF_NULL(meta_graph);
std::vector<AnfNodePtr> op_inputs;
auto value_node = NewValueNode(prim::kPrimReturn);
op_inputs.push_back(value_node);
auto tensor_id = meta_graph->outputIndex()->data()[0];
op_inputs.push_back(GetNode(tensor_id));
auto cnode = model->NewCNode(op_inputs);
cnode->set_fullname_with_scope("return");
model->set_return(cnode);
}
FuncGraphPtr AnfImporterFromMetaGraph::GetResult() { return this->model; }
} // namespace mindspore::lite

View File

@ -0,0 +1,47 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_SRC_ANF_IMPORTER_IMPORTER_FROM_META_GRAPH_H_
#define MINDSPORE_LITE_SRC_ANF_IMPORTER_IMPORTER_FROM_META_GRAPH_H_
#include <memory>
#include "src/train/model_impl.h"
#include "schema/model_generated.h"
#include "src/common/anf_importer/anf_importer.h"
namespace mindspore::lite {
class AnfImporterFromMetaGraph : public AnfImporter {
public:
explicit AnfImporterFromMetaGraph(std::shared_ptr<ModelImpl> model) : model(model) {}
~AnfImporterFromMetaGraph() override = default;
FuncGraphPtr GetResult() override;
private:
void ConverterConstTensor() override;
int ConverterCNode() override;
void AddReturnCNode() override;
private:
std::shared_ptr<ModelImpl> model = nullptr;
};
} // namespace mindspore::lite
#endif // MINDSPORE_LITE_SRC_ANF_IMPORTER_IMPORTER_FROM_META_GRAPH_H_

View File

@ -0,0 +1,123 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <vector>
#include <memory>
#include "schema/inner/model_generated.h"
#include "frontend/operator/ops.h"
#include "src/param_value_lite.h"
#include "import_from_meta_graphT.h"
#include "utils/log_adapter.h"
#include "abstract/abstract_value.h"
#include "src/ir/primitive_value.h"
#include "src/ir/primitive_t_value.h"
#include "include/errorcode.h"
#include "src/ops/ops.h"
namespace mindspore::lite {
void AnfImporterFromMetaGraphT::ConverterConstTensor() {
MS_EXCEPTION_IF_NULL(meta_graph_);
MS_EXCEPTION_IF_NULL(func_graph_);
for (size_t i = 0; i < meta_graph_->allTensors.size(); i++) {
auto &tensor = meta_graph_->allTensors.at(i);
MS_EXCEPTION_IF_NULL(tensor);
if (tensor->nodeType != schema::NodeType_ValueNode) {
continue;
}
MS_ASSERT(tensor->dims() != nullptr);
auto parameter = func_graph_->add_parameter();
std::vector<int> shape;
for (int &dim : tensor->dims) {
shape.push_back(dim);
}
auto type_id = static_cast<TypeId>(tensor->dataType);
auto type_ptr = TypeIdToType(type_id);
auto abstract_tensor = std::make_shared<abstract::AbstractTensor>(type_ptr, shape);
parameter->set_abstract(abstract_tensor);
ParamValueLitePtr param_value = std::make_shared<ParamValueLite>();
MS_EXCEPTION_IF_NULL(param_value);
param_value->set_tensor_shape(shape);
param_value->set_tensor_type(type_id);
if (!tensor->data.empty()) {
auto size = tensor->data.size();
char *tensor_data = new char[size];
MS_EXCEPTION_IF_NULL(tensor_data);
std::memcpy(tensor_data, tensor->data.data(), size);
param_value->set_tensor_addr(tensor_data);
param_value->set_tensor_size(size);
}
parameter->set_default_param(param_value);
AddNode(i, parameter);
}
}
int AnfImporterFromMetaGraphT::ConverterCNode() {
MS_EXCEPTION_IF_NULL(meta_graph_);
MS_EXCEPTION_IF_NULL(func_graph_);
for (size_t i = 0; i < meta_graph_->nodes.size(); i++) {
auto &cNode = meta_graph_->nodes.at(i);
MS_EXCEPTION_IF_NULL(cNode);
auto tensor_id = cNode->outputIndex.front();
if (nullptr != GetNode(tensor_id)) {
continue;
}
auto primTValue = std::make_shared<PrimitiveTValue>(cNode->primitive.release());
cNode->primitive = nullptr;
auto value_node = NewValueNode(primTValue);
std::vector<AnfNodePtr> op_inputs = {value_node};
for (size_t j = 0; j < cNode->inputIndex.size(); j++) {
auto node = GetNode(cNode->inputIndex.at(j));
if (nullptr == node) {
MS_LOG(ERROR) << "Can't find input node.";
return RET_ERROR;
}
// todo: CheckInputNodeType, the first node should be op;
op_inputs.push_back(node);
}
auto cnode = func_graph_->NewCNode(op_inputs);
cnode->set_fullname_with_scope(cNode->name);
AddNode(tensor_id, cnode);
}
return RET_OK;
}
void AnfImporterFromMetaGraphT::AddReturnCNode() {
MS_EXCEPTION_IF_NULL(meta_graph_);
MS_EXCEPTION_IF_NULL(func_graph_);
std::vector<AnfNodePtr> make_tuple_inputs;
auto make_tuple_value_node = NewValueNode(prim::kPrimMakeTuple);
make_tuple_inputs.emplace_back(make_tuple_value_node);
for (auto tensor_id : meta_graph_->outputIndex) {
make_tuple_inputs.emplace_back(GetNode(tensor_id));
}
auto make_tuple_cnode = func_graph_->NewCNode(make_tuple_inputs);
make_tuple_cnode->set_fullname_with_scope("return tuple");
std::vector<AnfNodePtr> op_inputs;
auto value_node = NewValueNode(prim::kPrimReturn);
op_inputs.emplace_back(value_node);
op_inputs.emplace_back(make_tuple_cnode);
auto cnode = func_graph_->NewCNode(op_inputs);
cnode->set_fullname_with_scope("return");
func_graph_->set_return(cnode);
}
FuncGraphPtr AnfImporterFromMetaGraphT::GetResult() { return this->func_graph_; }
} // namespace mindspore::lite

View File

@ -0,0 +1,49 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_SRC_ANF_IMPORTER_IMPORTER_FROM_META_GRAPHT_H_
#define MINDSPORE_LITE_SRC_ANF_IMPORTER_IMPORTER_FROM_META_GRAPHT_H_
#include <utility>
#include "schema/inner/model_generated.h"
#include "src/common/anf_importer/anf_importer.h"
namespace mindspore::lite {
class AnfImporterFromMetaGraphT : public AnfImporter {
public:
explicit AnfImporterFromMetaGraphT(schema::MetaGraphT *meta_graph, FuncGraphPtr func_graph)
: meta_graph_(meta_graph), func_graph_(std::move(func_graph)) {}
~AnfImporterFromMetaGraphT() override = default;
FuncGraphPtr GetResult() override;
private:
void ConverterConstTensor() override;
int ConverterCNode() override;
void AddReturnCNode() override;
private:
schema::MetaGraphT *meta_graph_;
FuncGraphPtr func_graph_;
};
} // namespace mindspore::lite
#endif // MINDSPORE_LITE_SRC_ANF_IMPORTER_IMPORTER_FROM_META_GRAPHT_H_
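// Hedged usage sketch (assumes a populated schema::MetaGraphT, e.g. produced by the converter;
// ImportExample is not a real function in this change):
#if 0
FuncGraphPtr ImportExample(schema::MetaGraphT *meta_graph) {
  auto func_graph = std::make_shared<FuncGraph>();
  AnfImporterFromMetaGraphT importer(meta_graph, func_graph);
  if (importer.Import() != RET_OK) {  // RET_OK comes from include/errorcode.h
    return nullptr;
  }
  return importer.GetResult();  // the same FuncGraph that was passed in
}
#endif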

View File

@ -0,0 +1,717 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/common/anf_importer/import_from_protobuf.h"
#include <fcntl.h>
#include <unistd.h>
#include <functional>
#include <map>
#include <stack>
#include <unordered_map>
#include <memory>
#include <string>
#include <vector>
#include <fstream>
#include "ir/func_graph.h"
#include "ir/anf.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "src/param_value_lite.h"
#include "src/ir/tensor.h"
#include "frontend/operator/ops.h"
#include "tools/converter/parser/onnx/onnx.pb.h"
#include "utils/log_adapter.h"
#include "include/errorcode.h"
using string = std::string;
using int32 = int32_t;
using int64 = int64_t;
using uint64 = uint64_t;
namespace mindspore::lite {
static constexpr char kConstantValueNode[] = "Constant";
static constexpr char kCNodeShapeAttr[] = "shape";
static constexpr char kCNodeShape1Attr[] = "shape1";
static constexpr char kCNodeShape2Attr[] = "shape2";
enum ParseForm : int {
FORM_PARSE_TYPE = 0,
FORM_PARSE_SCALAR = 1,
FORM_PARSE_TENSOR = 2,
};
static std::map<std::string, ParseForm> kParseTypeSwitchMap{
{"type", FORM_PARSE_TYPE},
{"scalar", FORM_PARSE_SCALAR},
{"tensor", FORM_PARSE_TENSOR}};
static std::unordered_map<int, TypeId> kDefaultValueSwitchMap{
{onnx::TensorProto_DataType_BOOL, kNumberTypeBool},
{onnx::TensorProto_DataType_INT8, kNumberTypeInt8},
{onnx::TensorProto_DataType_INT16, kNumberTypeInt16},
{onnx::TensorProto_DataType_INT32, kNumberTypeInt32},
{onnx::TensorProto_DataType_INT64, kNumberTypeInt64},
{onnx::TensorProto_DataType_UINT8, kNumberTypeUInt8},
{onnx::TensorProto_DataType_UINT16, kNumberTypeUInt16},
{onnx::TensorProto_DataType_UINT32, kNumberTypeUInt32},
{onnx::TensorProto_DataType_UINT64, kNumberTypeUInt64},
{onnx::TensorProto_DataType_FLOAT16, kNumberTypeFloat16},
{onnx::TensorProto_DataType_FLOAT, kNumberTypeFloat32},
{onnx::TensorProto_DataType_DOUBLE, kNumberTypeFloat64},
{onnx::TensorProto_DataType_STRING, kObjectTypeString},
};
std::shared_ptr<ValueTuple> ParserScalarAttrValue(const std::string &attr_name,
const std::unordered_map<string, ValuePtr> &kv) {
std::string str = attr_name;
auto replace = [&](const string &orgStr, const string &newStr) {
std::string::size_type pos(0);
while ((pos = str.find(orgStr)) != std::string::npos) {
str.replace(pos, orgStr.length(), newStr);
}
return str;
};
// remove "scalar:"
str = replace("scalar:", "");
// remove "Tuple"
str = replace("Tuple", "");
// remove "List"
str = replace("List", "");
std::stack<std::string> rules;
std::stack<ValuePtr> value;
int num = 0, count = 0;
for (size_t i = 0; i < str.length(); i++) {
if (str[i] == '[') {
rules.push("[");
} else if (str[i] == ']') {
// rules
std::vector<ValuePtr> vec;
while (rules.top() != "[") {
rules.pop();
vec.push_back(value.top());
value.pop();
}
// pop "["
rules.pop();
// make tuple for names
std::string res = "dummy";
// make tuple for values
reverse(vec.begin(), vec.end());
auto vt = std::make_shared<ValueTuple>(vec);
if (rules.empty() && value.empty()) {
return vt;
}
rules.push(res);
value.push(vt);
} else if (str[i] == ',') {
continue;
} else {
count++;
if (str[i + 1] == '[' || str[i + 1] == ']' || str[i + 1] == ',') {
auto value_name = str.substr(i - count + 1, count);
value.push(kv.at(value_name));
rules.push(value_name);
count = 0;
num++;
}
}
}
return {};
}
std::shared_ptr<abstract::AbstractTuple>
ParserAttrShape(const std::string &attr_name, const std::unordered_map<string, abstract::AbstractTensorPtr> &kv) {
std::string str = attr_name;
auto replace = [&](const string &orgStr, const string &newStr) {
std::string::size_type pos(0);
while ((pos = str.find(orgStr)) != std::string::npos) {
str.replace(pos, orgStr.length(), newStr);
}
return str;
};
// remove "scalar:"
str = replace("shape:", "");
// remove "Tuple"
str = replace("Tuple", "");
// remove "List"
str = replace("List", "");
std::stack<std::string> rules;
std::stack<abstract::AbstractBasePtr> value;
int num = 0, count = 0;
for (size_t i = 0; i < str.length(); i++) {
if (str[i] == '[') {
rules.push("[");
} else if (str[i] == ']') {
// rules
std::vector<abstract::AbstractBasePtr> vec;
while (rules.top() != "[") {
rules.pop();
vec.push_back(value.top());
value.pop();
}
// pop "["
rules.pop();
// make tuple for names
std::string res = "dummy";
// make tuple for values
reverse(vec.begin(), vec.end());
auto vt = std::make_shared<abstract::AbstractTuple>(vec);
if (rules.empty() && value.empty()) {
return vt;
}
rules.push(res);
value.push(vt);
} else if (str[i] == ',') {
continue;
} else {
count++;
if (str[i + 1] == '[' || str[i + 1] == ']' || str[i + 1] == ',') {
auto value_name = str.substr(i - count + 1, count);
value.push(kv.at(value_name));
rules.push(value_name);
count = 0;
num++;
}
}
}
return {};
}
#define PARSE_ONNXATTR_IN_SCALAR_FORM(type, valuetype) \
ValuePtr ParseAttrInScalar_##type##_##valuetype(const onnx::TensorProto &attr_tensor) { \
if (attr_tensor.type##_data_size() == 1) { \
auto value = static_cast<valuetype>(attr_tensor.type##_data(0)); \
return MakeValue<valuetype>(value); \
} else { \
MS_LOG(ERROR) << "size of scalar tensor doesn't equal 1!"; \
} \
return{}; \
}
PARSE_ONNXATTR_IN_SCALAR_FORM(double, double)
PARSE_ONNXATTR_IN_SCALAR_FORM(float, float)
PARSE_ONNXATTR_IN_SCALAR_FORM(string, string)
PARSE_ONNXATTR_IN_SCALAR_FORM(int32, int32)
PARSE_ONNXATTR_IN_SCALAR_FORM(int32, bool)
PARSE_ONNXATTR_IN_SCALAR_FORM(int64, int64)
PARSE_ONNXATTR_IN_SCALAR_FORM(uint64, uint64)
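// For readability, a rough expansion of one instantiation above (kept under #if 0 because the
// macro already generates the real definition):
#if 0
ValuePtr ParseAttrInScalar_int32_int32(const onnx::TensorProto &attr_tensor) {
  if (attr_tensor.int32_data_size() == 1) {
    auto value = static_cast<int32>(attr_tensor.int32_data(0));
    return MakeValue<int32>(value);
  }
  MS_LOG(ERROR) << "size of scalar tensor doesn't equal 1!";
  return {};
}
#endif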
bool AnfImporterFromProtobuf::BuildParameterForFuncGraph(const ParameterPtr &node,
const onnx::ValueInfoProto &value_proto) {
MS_EXCEPTION_IF_NULL(node);
if (!value_proto.has_type() || !value_proto.has_name()) {
MS_LOG(ERROR) << "onnx ValueInfoProto has no type or name! ";
return false;
}
node->set_name(value_proto.name());
const auto &type_proto = value_proto.type();
if (!type_proto.has_tensor_type()) {
MS_LOG(ERROR) << "onnx TypeProto has no tesor_type! ";
return false;
}
const onnx::TypeProto_Tensor &tensor_typeproto = type_proto.tensor_type();
if (!tensor_typeproto.has_elem_type() || !tensor_typeproto.has_shape()) {
MS_LOG(ERROR) << "onnx TypeProto_Tensor has no elem_type or shape! ";
return false;
}
const onnx::TensorShapeProto &tensor_shape = tensor_typeproto.shape();
std::vector<int> shape;
for (int i = 0; i < tensor_shape.dim_size(); ++i) {
shape.push_back(tensor_shape.dim(i).dim_value());
}
if (kDefaultValueSwitchMap.find(tensor_typeproto.elem_type()) == kDefaultValueSwitchMap.end()) {
MS_LOG(ERROR) << "onnx TypeProto_Tensor elem_type is not support yet!";
return false;
}
auto type_ptr = TypeIdToType(kDefaultValueSwitchMap[tensor_typeproto.elem_type()]);
auto abstract_tensor = std::make_shared<abstract::AbstractTensor>(type_ptr, shape);
node->set_abstract(abstract_tensor);
if (default_para_map_.find(value_proto.name()) != default_para_map_.end()) {
tensor::Tensor *tensor_info = new tensor::Tensor(kDefaultValueSwitchMap[tensor_typeproto.elem_type()], shape);
MS_EXCEPTION_IF_NULL(tensor_info);
tensor_info->MallocData();
const onnx::TensorProto initialize_proto = default_para_map_[value_proto.name()];
std::string initial_data = initialize_proto.raw_data();
auto *tensor_data_buf = reinterpret_cast<uint8_t *>(tensor_info->Data());
MS_EXCEPTION_IF_NULL(tensor_data_buf);
memcpy_s(tensor_data_buf, tensor_info->Size(), initial_data.data(), initial_data.size());
ParamValueLitePtr param_value = std::make_shared<ParamValueLite>();
MS_EXCEPTION_IF_NULL(param_value);
param_value->set_tensor_addr(tensor_data_buf);
param_value->set_tensor_size(tensor_info->Size());
node->set_default_param(param_value);
}
anfnode_build_map_[value_proto.name()] = node;
return true;
}
bool AnfImporterFromProtobuf::ImportParametersForGraph(const FuncGraphPtr &outputFuncGraph,
const onnx::GraphProto &importProto) {
MS_EXCEPTION_IF_NULL(outputFuncGraph);
MS_LOG(INFO) << "Parameters had default paramerer size is: " << importProto.initializer_size();
for (int i = 0; i < importProto.initializer_size(); ++i) {
const onnx::TensorProto &initializer_proto = importProto.initializer(i);
if (!initializer_proto.has_name()) {
MS_LOG(ERROR) << "initializer vector of onnx GraphProto has no name at index: " << i;
return false;
}
default_para_map_[initializer_proto.name()] = initializer_proto;
}
MS_LOG(INFO) << "all parameters size: " << importProto.input_size();
for (int i = 0; i < importProto.input_size(); ++i) {
const onnx::ValueInfoProto &input_proto = importProto.input(i);
if (!BuildParameterForFuncGraph(outputFuncGraph->add_parameter(), input_proto)) {
MS_LOG(ERROR) << "Build parameter for funcgraph fail at index: " << i;
return false;
}
}
return true;
}
bool AnfImporterFromProtobuf::ObtainCNodeAttrInTypeForm(const PrimitivePtr &prim, const std::string &attr_name,
const onnx::TensorProto &attr_tensor) {
MS_EXCEPTION_IF_NULL(prim);
const int attr_tensor_type = attr_tensor.data_type();
if (kDefaultValueSwitchMap.find(attr_tensor_type) == kDefaultValueSwitchMap.end()) {
MS_LOG(ERROR) << "Obtain attr in type-form has not support input type:" << attr_tensor_type;
return false;
}
prim->AddAttr(attr_name, TypeIdToType(kDefaultValueSwitchMap[attr_tensor_type]));
return true;
}
ValuePtr AnfImporterFromProtobuf::ObtainCNodeAttrInScalarForm(const onnx::TensorProto &attr_tensor) {
const int attr_tensor_type = attr_tensor.data_type();
switch (attr_tensor_type) {
case onnx::TensorProto_DataType_STRING: {
return ParseAttrInScalar_string_string(attr_tensor);
}
case onnx::TensorProto_DataType_INT32: {
return ParseAttrInScalar_int32_int32(attr_tensor);
}
case onnx::TensorProto_DataType_INT64: {
return ParseAttrInScalar_int64_int64(attr_tensor);
}
case onnx::TensorProto_DataType_UINT64: {
return ParseAttrInScalar_uint64_uint64(attr_tensor);
}
case onnx::TensorProto_DataType_FLOAT: {
return ParseAttrInScalar_float_float(attr_tensor);
}
case onnx::TensorProto_DataType_DOUBLE: {
return ParseAttrInScalar_double_double(attr_tensor);
}
case onnx::TensorProto_DataType_BOOL: {
return ParseAttrInScalar_int32_bool(attr_tensor);
}
default:
MS_LOG(ERROR) << "Obtain attr in scalar-form has not support input type: " << attr_tensor_type;
return {};
}
return {};
}
bool AnfImporterFromProtobuf::ObtainCNodeAttrInTensorForm(const PrimitivePtr &prim, const std::string &attr_name,
const onnx::TensorProto &attr_tensor) {
MS_EXCEPTION_IF_NULL(prim);
MS_LOG(ERROR) << "parse attr type don't support attr type is tensor";
return false;
}
bool AnfImporterFromProtobuf::GetAttrValueForCNode(const PrimitivePtr &prim, const onnx::AttributeProto &attr_proto) {
MS_EXCEPTION_IF_NULL(prim);
const std::string &attr_name = attr_proto.name();
if (!attr_proto.has_ref_attr_name()) {
MS_LOG(ERROR) << "CNode parse attr type has no ref_attr_name";
return false;
}
const std::string &ref_attr_name = attr_proto.ref_attr_name();
string type;
std::size_t pos(0);
if ((pos = ref_attr_name.find("scalar:")) != std::string::npos) {
type = ref_attr_name.substr(pos, string("scalar:").length() - 1);
} else if ((pos = ref_attr_name.find("type:")) != std::string::npos) {
type = ref_attr_name.substr(pos, string("type:").length() - 1);
} else if ((pos = ref_attr_name.find("tensor:")) != std::string::npos) {
type = ref_attr_name.substr(pos, string("tensor:").length() - 1);
}
std::unordered_map<std::string, ValuePtr> kv;
for (int i = 0; i < attr_proto.tensors_size(); i++) {
const onnx::TensorProto &attr_tensor = attr_proto.tensors(i);
switch (kParseTypeSwitchMap[type]) {
case FORM_PARSE_TYPE: {
return ObtainCNodeAttrInTypeForm(prim, attr_name, attr_tensor);
}
case FORM_PARSE_SCALAR: {
auto res = ObtainCNodeAttrInScalarForm(attr_tensor);
kv.insert(std::pair<string, ValuePtr>(attr_tensor.name(), res));
break;
}
case FORM_PARSE_TENSOR: {
return ObtainCNodeAttrInTensorForm(prim, attr_name, attr_tensor);
}
default:
MS_LOG(ERROR) << "parse attr type don't support input of ref_attr_name";
return false;
}
}
if (kParseTypeSwitchMap[type] == FORM_PARSE_SCALAR) {
if (kv.size() == 1) {
std::unordered_map<std::string, ValuePtr>::iterator iter = kv.begin();
prim->AddAttr(attr_name, iter->second);
} else {
auto res = ParserScalarAttrValue(ref_attr_name, kv);
prim->AddAttr(attr_name, res);
}
}
return true;
}
bool AnfImporterFromProtobuf::ObtainValueNodeInTensorForm(const std::string &value_node_name,
const onnx::TensorProto &attr_tensor) {
const int attr_tensor_type = attr_tensor.data_type();
std::vector<int> shape;
for (int i = 0; i < attr_tensor.dims_size(); ++i) {
shape.push_back(attr_tensor.dims(i));
}
tensor::TensorPtr tensor_info = std::make_shared<tensor::Tensor>(kDefaultValueSwitchMap[attr_tensor_type], shape);
tensor_info->MallocData();
const std::string &tensor_buf = attr_tensor.raw_data();
auto *tensor_data_buf = reinterpret_cast<uint8_t *>(tensor_info->Data());
memcpy_s(tensor_data_buf, tensor_info->Size(), tensor_buf.data(), tensor_buf.size());
auto new_value_node = NewValueNode(MakeValue(tensor_info));
MS_EXCEPTION_IF_NULL(new_value_node);
auto type_ptr = TypeIdToType(kDefaultValueSwitchMap[attr_tensor_type]);
auto abstract_tensor = std::make_shared<abstract::AbstractTensor>(type_ptr, shape);
new_value_node->set_abstract(abstract_tensor);
anfnode_build_map_[value_node_name] = new_value_node;
return true;
}
bool AnfImporterFromProtobuf::ObtainValueNodeInTypeForm(const std::string &value_node_name,
const onnx::TensorProto &attr_tensor) {
const int attr_tensor_type = attr_tensor.data_type();
if (kDefaultValueSwitchMap.find(attr_tensor_type) == kDefaultValueSwitchMap.end()) {
MS_LOG(ERROR) << "Obtain ValueNode attr in type-form has not support input type: " << attr_tensor_type;
return false;
}
auto new_value_node = NewValueNode(TypeIdToType(kDefaultValueSwitchMap[attr_tensor_type]));
abstract::AbstractTypePtr abs_type = std::make_shared<abstract::AbstractType>(std::make_shared<TypeType>());
new_value_node->set_abstract(abs_type);
anfnode_build_map_[value_node_name] = new_value_node;
return true;
}
bool AnfImporterFromProtobuf::GetAttrValueForValueNode(const std::string &value_node_name,
const onnx::AttributeProto &attr_proto) {
const std::string &attr_name = attr_proto.name();
if (!attr_proto.has_ref_attr_name()) {
MS_LOG(ERROR) << "CNode parse attr type has no ref_attr_name";
return false;
}
const std::string &ref_attr_name = attr_proto.ref_attr_name();
string type;
std::size_t pos(0);
if ((pos = ref_attr_name.find("scalar:")) != std::string::npos) {
type = ref_attr_name.substr(pos, string("scalar:").length() - 1);
} else if ((pos = ref_attr_name.find("type:")) != std::string::npos) {
type = ref_attr_name.substr(pos, string("type:").length() - 1);
} else if ((pos = ref_attr_name.find("tensor:")) != std::string::npos) {
type = ref_attr_name.substr(pos, string("tensor:").length() - 1);
}
std::unordered_map<std::string, ValuePtr> kv;
for (int i = 0; i < attr_proto.tensors_size(); i++) {
const onnx::TensorProto &attr_tensor = attr_proto.tensors(i);
switch (kParseTypeSwitchMap[type]) {
case FORM_PARSE_TYPE: {
return ObtainValueNodeInTypeForm(value_node_name, attr_tensor);
}
case FORM_PARSE_SCALAR: {
auto res = ObtainCNodeAttrInScalarForm(attr_tensor);
kv.insert(std::pair<string, ValuePtr>(attr_tensor.name(), res));
break;
}
case FORM_PARSE_TENSOR: {
return ObtainValueNodeInTensorForm(value_node_name, attr_tensor);
}
default:
MS_LOG(ERROR) << "parse attr type don't support input of ref_attr_name";
return false;
}
}
ValueNodePtr new_value_node;
if (kParseTypeSwitchMap[type] == FORM_PARSE_SCALAR) {
if (kv.size() == 1) {
std::unordered_map<std::string, ValuePtr>::iterator iter = kv.begin();
new_value_node = NewValueNode(iter->second);
new_value_node->set_abstract(iter->second->ToAbstract());
} else {
auto value_ptr = ParserScalarAttrValue(ref_attr_name, kv);
new_value_node = NewValueNode(value_ptr);
new_value_node->set_abstract(value_ptr->ToAbstract());
}
anfnode_build_map_[value_node_name] = new_value_node;
}
return true;
}
bool AnfImporterFromProtobuf::BuildValueNodeForFuncGraph(const onnx::NodeProto &node_proto) {
const std::string &value_node_name = node_proto.output(0);
const onnx::AttributeProto &attr_proto = node_proto.attribute(0);
if (!attr_proto.has_ref_attr_name()) {
MS_LOG(ERROR) << "parse ValueNode don't have ref_attr_name";
return false;
}
return GetAttrValueForValueNode(value_node_name, attr_proto);
}
std::unordered_map<std::string, abstract::AbstractTensorPtr>
AnfImporterFromProtobuf::GetAbstractForCNode(const onnx::AttributeProto &attr_proto) {
std::unordered_map<std::string, abstract::AbstractTensorPtr> kv;
for (int i = 0; i < attr_proto.tensors_size(); i++) {
std::vector<int> shape_vec;
const onnx::TensorProto &attr_tensor = attr_proto.tensors(i);
for (int j = 0; j < attr_tensor.dims_size(); ++j) {
shape_vec.push_back(attr_tensor.dims(j));
}
auto type_ptr = TypeIdToType(kDefaultValueSwitchMap[attr_tensor.data_type()]);
auto abstract_tensor = std::make_shared<abstract::AbstractTensor>(type_ptr, shape_vec);
kv.insert(std::pair<string, abstract::AbstractTensorPtr>(attr_tensor.name(), abstract_tensor));
}
return kv;
}
CNodePtr AnfImporterFromProtobuf::BuildCNodeForFuncGraph(const FuncGraphPtr &outputFuncGraph,
const onnx::NodeProto &node_proto) {
MS_EXCEPTION_IF_NULL(outputFuncGraph);
if (!node_proto.has_op_type()) {
MS_LOG(ERROR) << "Get CNode op_type failed!";
return nullptr;
}
const std::string &node_name = node_proto.output(0);
const std::string &fullname_with_scope = node_proto.domain();
const std::string &node_type = node_proto.op_type();
PrimitivePtr prim = std::make_shared<mindspore::Primitive>(node_type);
MS_EXCEPTION_IF_NULL(prim);
prim->set_instance_name(node_type);
std::unordered_map<std::string, abstract::AbstractTensorPtr> kv;
string shape_ref_attr_name;
for (int i = 0; i < node_proto.attribute_size(); ++i) {
const onnx::AttributeProto &attr_proto = node_proto.attribute(i);
if (attr_proto.ref_attr_name().find("shape:") != string::npos) {
shape_ref_attr_name = attr_proto.ref_attr_name();
kv = GetAbstractForCNode(attr_proto);
continue;
}
if (!GetAttrValueForCNode(prim, attr_proto)) {
MS_LOG(ERROR) << "Get CNode attr failed!";
return nullptr;
}
}
std::vector<AnfNodePtr> inputs;
inputs.clear();
inputs.push_back(NewValueNode(prim));
for (int i = 0; i < node_proto.input_size(); ++i) {
const std::string &input_name = node_proto.input(i);
if (anfnode_build_map_.find(input_name) == anfnode_build_map_.end()) {
MS_LOG(ERROR) << node_name << " input " << i << " " << input_name << " can't be found in parsed nodes";
return nullptr;
}
inputs.push_back(anfnode_build_map_[input_name]);
}
CNodePtr cnode_ptr = outputFuncGraph->NewCNode(inputs);
MS_EXCEPTION_IF_NULL(cnode_ptr);
if (0 == kv.size()) {
AbstractBasePtrList elem;
for (size_t index = 1; index < cnode_ptr->inputs().size(); ++index) {
elem.push_back(cnode_ptr->input(index)->abstract());
}
cnode_ptr->set_abstract(std::make_shared<abstract::AbstractTuple>(elem));
} else if (1 == kv.size()) {
std::unordered_map<std::string, abstract::AbstractTensorPtr>::iterator iter = kv.begin();
cnode_ptr->set_abstract(iter->second);
} else {
auto abstract = ParserAttrShape(shape_ref_attr_name, kv);
cnode_ptr->set_abstract(abstract);
}
cnode_ptr->set_fullname_with_scope(fullname_with_scope);
anfnode_build_map_[node_name] = cnode_ptr;
return cnode_ptr;
}
bool AnfImporterFromProtobuf::BuildReturnForFuncGraph(const FuncGraphPtr &outputFuncGraph,
const onnx::GraphProto &importProto,
const CNodePtr &cnode_ptr) {
MS_EXCEPTION_IF_NULL(outputFuncGraph);
MS_EXCEPTION_IF_NULL(cnode_ptr);
std::vector<AnfNodePtr> inputs;
if (importProto.output_size() > 1) {
inputs.clear();
inputs.push_back(NewValueNode(prim::kPrimMakeTuple));
AbstractBasePtrList elem;
for (int out_size = 0; out_size < importProto.output_size(); ++out_size) {
const onnx::ValueInfoProto &output_node = importProto.output(out_size);
const std::string &out_tuple = output_node.name();
inputs.push_back(anfnode_build_map_[out_tuple]);
elem.push_back(anfnode_build_map_[out_tuple]->abstract());
}
auto maketuple_ptr = outputFuncGraph->NewCNode(inputs);
maketuple_ptr->set_abstract(std::make_shared<abstract::AbstractTuple>(elem));
inputs.clear();
inputs.push_back(NewValueNode(prim::kPrimReturn));
inputs.push_back(maketuple_ptr);
auto return_node = outputFuncGraph->NewCNode(inputs);
MS_EXCEPTION_IF_NULL(return_node);
outputFuncGraph->set_return(return_node);
MS_LOG(INFO) << "Construct funcgraph finined, all success.";
} else {
const onnx::ValueInfoProto &output_node = importProto.output(0);
const onnx::TypeProto &output_typeproto = output_node.type();
int output_type = output_typeproto.tensor_type().elem_type();
std::vector<int> output_shape;
for (int i = 0; i < output_typeproto.tensor_type().shape().dim_size(); ++i) {
output_shape.push_back(output_typeproto.tensor_type().shape().dim(i).dim_value());
}
auto type_ptr = TypeIdToType(kDefaultValueSwitchMap[output_type]);
auto abstract_tensor = std::make_shared<abstract::AbstractTensor>(type_ptr, output_shape);
inputs.clear();
inputs.push_back(NewValueNode(prim::kPrimReturn));
inputs.push_back(cnode_ptr);
auto return_node = outputFuncGraph->NewCNode(inputs);
MS_EXCEPTION_IF_NULL(return_node);
return_node->set_abstract(abstract_tensor);
outputFuncGraph->set_return(return_node);
MS_LOG(INFO) << "Construct funcgraph finined, all success!";
}
return true;
}
bool AnfImporterFromProtobuf::ImportNodesForGraph(const FuncGraphPtr &outputFuncGraph,
const onnx::GraphProto &importProto) {
MS_EXCEPTION_IF_NULL(outputFuncGraph);
MS_LOG(INFO) << "The CNdoe size : " << importProto.node_size();
CNodePtr cnode_ptr = nullptr;
for (int i = 0; i < importProto.node_size(); ++i) {
const onnx::NodeProto &node_proto = importProto.node(i);
const std::string &node_type = node_proto.op_type();
if (node_type == kConstantValueNode) {
if (!BuildValueNodeForFuncGraph(node_proto)) {
MS_LOG(ERROR) << "Build ValueNode for funcgraph fail at index: : " << i;
return false;
}
continue;
}
cnode_ptr = BuildCNodeForFuncGraph(outputFuncGraph, node_proto);
if (cnode_ptr == nullptr) {
MS_LOG(ERROR) << "Build CNode for funcgraph fail at index: : " << i;
return false;
}
}
if (!BuildReturnForFuncGraph(outputFuncGraph, importProto, cnode_ptr)) {
MS_LOG(ERROR) << "Build return node for funcgraph failed";
return false;
}
return true;
}
bool AnfImporterFromProtobuf::BuildFuncGraph(const FuncGraphPtr &outputFuncGraph, const onnx::GraphProto &importProto) {
MS_EXCEPTION_IF_NULL(outputFuncGraph);
GraphDebugInfoPtr debug_info_ptr = outputFuncGraph->debug_info();
MS_EXCEPTION_IF_NULL(debug_info_ptr);
if (importProto.has_name()) {
debug_info_ptr->set_name(importProto.name());
} else {
MS_LOG(ERROR) << "FuncGraph under converting has not name!";
}
if (!ImportParametersForGraph(outputFuncGraph, importProto)) {
return false;
}
return ImportNodesForGraph(outputFuncGraph, importProto);
}
bool AnfImporterFromProtobuf::ParseModelConfigureInfo(const onnx::ModelProto &model_proto) {
if (!model_proto.has_producer_name()) {
MS_LOG(ERROR) << "Parse model producer name from pb file failed!";
return false;
}
producer_name_ = model_proto.producer_name();
if (!model_proto.has_model_version()) {
MS_LOG(ERROR) << "Parse model producer version from pb file failed!";
return false;
}
model_version_ = model_proto.model_version();
if (!model_proto.has_ir_version()) {
MS_LOG(ERROR) << "Parse model version from pb file failed!";
return false;
}
ir_version_ = model_proto.ir_version();
return true;
}
int AnfImporterFromProtobuf::Import() {
FuncGraphPtr dstGraph = std::make_shared<mindspore::FuncGraph>();
MS_EXCEPTION_IF_NULL(dstGraph);
if (!ParseModelConfigureInfo(*onnx_model_)) {
MS_LOG(ERROR) << "Parse configuration info for pb file failed!";
}
const onnx::GraphProto &graphBuild = onnx_model_->graph();
if (!BuildFuncGraph(dstGraph, graphBuild)) {
MS_LOG(ERROR) << "Build funcgraph failed!";
return RET_ERROR;
}
func_graph_ = dstGraph;
MS_LOG(INFO) << "Parse pb to build FuncGraph Success!";
return RET_OK;
}
onnx::ModelProto *AnfImporterFromProtobuf::ReadOnnxFromBinary(const std::string &model_path) {
std::unique_ptr<char[]> onnx_file(new (std::nothrow) char[PATH_MAX]{0});
if (realpath(model_path.c_str(), onnx_file.get()) == nullptr) {
MS_LOG(ERROR) << "open file failed.";
return nullptr;
}
int fd = open(onnx_file.get(), O_RDONLY);
if (fd < 0) {
MS_LOG(ERROR) << "open file " << onnx_file.get() << " failed.";
return nullptr;
}
google::protobuf::io::FileInputStream input(fd);
google::protobuf::io::CodedInputStream code_input(&input);
code_input.SetTotalBytesLimit(INT_MAX, 536870912);
auto onnx_model = new onnx::ModelProto;
bool ret = onnx_model->ParseFromCodedStream(&code_input);
if (!ret) {
MS_LOG(ERROR) << "load onnx file failed";
delete onnx_model;
return nullptr;
}
(void) close(fd);
MS_LOG(INFO) << "enter ReadProtoFromBinary success!" << std::endl;
return onnx_model;
}
FuncGraphPtr AnfImporterFromProtobuf::GetResult() { return this->func_graph_; }
} // namespace mindspore::lite
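A minimal usage sketch of the importer above, not part of this commit; the model path, the include path, and the nullptr FuncGraph argument are assumptions (Import() builds its own graph and exposes it through GetResult()):
#include "src/common/anf_importer/anf_importer_from_protobuf.h"  // assumed header location

void ImportExample() {
  auto *model = mindspore::lite::AnfImporterFromProtobuf::ReadOnnxFromBinary("model.pb");  // hypothetical path
  if (model == nullptr) {
    return;
  }
  mindspore::lite::AnfImporterFromProtobuf importer(model, nullptr);
  if (importer.Import() == mindspore::lite::RET_OK) {
    mindspore::FuncGraphPtr graph = importer.GetResult();  // the freshly built FuncGraph
  }
  delete model;  // ReadOnnxFromBinary allocates the ModelProto with new
}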

View File

@ -0,0 +1,92 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_SRC_ANF_IMPORTER_IMPORTER_FROM_PROTOBUF_H_
#define MINDSPORE_LITE_SRC_ANF_IMPORTER_IMPORTER_FROM_PROTOBUF_H_
#include <string>
#include <map>
#include <unordered_map>
#include <utility>
#include "tools/converter/parser/onnx/onnx.pb.h"
#include "src/common/anf_importer/anf_importer.h"
#include "abstract/abstract_value.h"
namespace mindspore::lite {
class AnfImporterFromProtobuf : public AnfImporter {
public:
explicit AnfImporterFromProtobuf(onnx::ModelProto *onnx_model, FuncGraphPtr func_graph)
: onnx_model_(onnx_model), func_graph_(std::move(func_graph)) {}
~AnfImporterFromProtobuf() override = default;
static onnx::ModelProto *ReadOnnxFromBinary(const std::string &model_path);
FuncGraphPtr GetResult() override;
int Import() override;
private:
void ConverterConstTensor() override {};
int ConverterCNode() override { return 0; };
void AddReturnCNode() override {};
bool ParseModelConfigureInfo(const onnx::ModelProto &model_proto);
bool BuildFuncGraph(const FuncGraphPtr &outputFuncGraph,
const onnx::GraphProto &importProto);
bool ImportParametersForGraph(const FuncGraphPtr &outputFuncGraph,
const onnx::GraphProto &importProto);
bool ImportNodesForGraph(const FuncGraphPtr &outputFuncGraph,
const onnx::GraphProto &importProto);
bool BuildParameterForFuncGraph(const ParameterPtr &node,
const onnx::ValueInfoProto &value_proto);
CNodePtr BuildCNodeForFuncGraph(const FuncGraphPtr &outputFuncGraph,
const onnx::NodeProto &node_proto);
bool BuildReturnForFuncGraph(const FuncGraphPtr &outputFuncGraph,
const onnx::GraphProto &importProto,
const CNodePtr &cnode_ptr);
bool GetAttrValueForCNode(const PrimitivePtr &prim,
const onnx::AttributeProto &attr_proto);
bool ObtainCNodeAttrInTypeForm(const PrimitivePtr &prim,
const std::string &attr_name,
const onnx::TensorProto &attr_tensor);
ValuePtr ObtainCNodeAttrInScalarForm(const onnx::TensorProto &attr_tensor);
bool ObtainCNodeAttrInTensorForm(const PrimitivePtr &prim,
const std::string &attr_name,
const onnx::TensorProto &attr_tensor);
bool BuildValueNodeForFuncGraph(const onnx::NodeProto &node_proto);
bool ObtainValueNodeInTensorForm(const std::string &value_node_name,
const onnx::TensorProto &attr_tensor);
bool GetAttrValueForValueNode(const std::string &value_node_name,
const onnx::AttributeProto &attr_tensor);
bool ObtainValueNodeInTypeForm(const std::string &value_node_name,
const onnx::TensorProto &attr_tensor);
std::unordered_map<std::string, abstract::AbstractTensorPtr>
GetAbstractForCNode(const onnx::AttributeProto &attr_proto);
private:
std::string producer_name_;
int model_version_{};
int ir_version_{};
std::unordered_map<std::string, AnfNodePtr> anfnode_build_map_;
std::map<std::string, onnx::TensorProto> default_para_map_;
onnx::ModelProto *onnx_model_;
FuncGraphPtr func_graph_;
};
} // namespace mindspore::lite
#endif // MINDSPORE_LITE_SRC_ANF_IMPORTER_IMPORTER_FROM_PROTOBUF_H_

View File

@ -0,0 +1,64 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_COMMON_COMMON_H_
#define MINDSPORE_LITE_COMMON_COMMON_H_
#include <string>
#include "schema/model_generated.h"
namespace mindspore {
namespace lite {
enum NCHW_SHAPE { NCHW_N = 0, NCHW_C = 1, NCHW_H = 2, NCHW_W = 3 };
enum NHWC_SHAPE { NHWC_N = 0, NHWC_H = 1, NHWC_W = 2, NHWC_C = 3 };
enum HWCK_SHAPE { HWCK_H = 0, HWCK_W = 1, HWCK_C = 2, HWCK_K = 3 };
enum HWKC_SHAPE { HWKC_H = 0, HWKC_W = 1, HWKC_K = 2, HWKC_C = 3 };
enum KCHW_SHAPE { KCHW_K = 0, KCHW_C = 1, KCHW_H = 2, KCHW_W = 3 };
enum CKHW_SHAPE { CKHW_C = 0, CKHW_K = 1, CKHW_H = 2, CKHW_W = 3 };
enum CHWK_SHAPE { CHWK_C = 0, CHWK_H = 1, CHWK_W = 2, CHWK_K = 3 };
enum KHWC_SHAPE { KHWC_K = 0, KHWC_H = 1, KHWC_W = 2, KHWC_C = 3 };
enum CHW_SHAPE { CHW_C = 0, CHW_H = 1, CHW_W = 2 };
enum HWC_SHAPE { HWC_H = 0, HWC_W = 1, HWC_C = 2 };
static constexpr int kNCHWDimNumber = 4;
static constexpr int kNHWCDimNumber = 4;
static constexpr int TENSOR_MAX_REFCOUNT = 999;
static const char *DELIM_COLON = ":";
static const char *DELIM_COMMA = ",";
static const char *DELIM_SLASH = "/";
static const char *DELIM_DOUBLE_BACKSLASH = "\\";
// quantization relative
static const char QUANTIZED_UINT8[] = "QUANTIZED_UINT8";
static const char QUANTIZED_INT8[] = "QUANTIZED_INT8";
static const char QUANTIZED_INT16[] = "QUANTIZED_INT16";
static const char QUANTIZED_UINT16[] = "QUANTIZED_UINT16";
static const char QUANTIZED_FLOAT16[] = "FLOAT16";
static const char QUANTIZED_FLOAT32[] = "FLOAT32";
static const char QUANTIZATION_TYPE_DYNAMIC[] = "DYNAMIC";
static const char QUANTIZATION_TYPE_STATIC[] = "STATIC";
static const char CALIB_NORM[] = "NORM";
// dims
static const int32_t DIM_DEFAULT_SIZE = 4;
static const schema::Format DEFAULT_FORMAT = schema::Format_NCHW;
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_COMMON_COMMON_H_

View File

@ -0,0 +1,168 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <stdlib.h>
#include <fcntl.h>
#include <climits>
#include <cmath>
#include "src/common/file_utils.h"
#include "securec/include/securec.h"
namespace mindspore {
namespace lite {
#define MAX_FILENAME_LEN 1024
char *ReadFile(const char *file, size_t *size) {
if (file == nullptr) {
MS_LOG(ERROR) << "file is nullptr";
return nullptr;
}
MS_ASSERT(size != nullptr);
std::string realPath = RealPath(file);
std::ifstream ifs(realPath, std::ios::in | std::ios::binary);
if (!ifs.good()) {
MS_LOG(ERROR) << "file: " << realPath << " is not exist";
return nullptr;
}
if (!ifs.is_open()) {
MS_LOG(ERROR) << "file: " << realPath << " open failed";
return nullptr;
}
ifs.seekg(0, std::ios::end);
*size = ifs.tellg();
std::unique_ptr<char[]> buf(new (std::nothrow) char[*size]);
if (buf == nullptr) {
MS_LOG(ERROR) << "malloc buf failed, file: " << realPath;
ifs.close();
return nullptr;
}
ifs.seekg(0, std::ios::beg);
ifs.read(buf.get(), *size);
ifs.close();
return buf.release();
}
std::string RealPath(const char *path) {
if (path == nullptr) {
MS_LOG(ERROR) << "path is nullptr";
return "";
}
if ((strlen(path)) >= PATH_MAX) {
MS_LOG(ERROR) << "path is too long";
return "";
}
std::unique_ptr<char[]> resolvedPath(new (std::nothrow) char[PATH_MAX]{0});
if (resolvedPath == nullptr) {
MS_LOG(ERROR) << "new resolvedPath failed";
return "";
}
char *realPath = realpath(path, resolvedPath.get());
if (realPath == nullptr) {
MS_LOG(ERROR) << "Proto file path is not valid";
return "";
}
std::string res = resolvedPath.get();
return res;
}
int WriteToBin(const std::string &file_path, void *data, size_t size) {
std::ofstream out_file;
out_file.open(file_path.c_str(), std::ios::binary);
if (!out_file.good()) {
return -1;
}
if (!out_file.is_open()) {
out_file.close();
return -1;
}
out_file.write(reinterpret_cast<char *>(data), size);
return 0;
}
int CompareOutputData(float *output_data, float *correct_data, int data_size) {
float error = 0;
for (size_t i = 0; i < data_size; i++) {
float abs = fabs(output_data[i] - correct_data[i]);
if (abs > 0.00001) {
error += abs;
}
}
error /= data_size;
if (error > 0.0001) {
printf("has accuracy error!\n");
printf("%f\n", error);
return 1;
}
return 0;
}
void CompareOutput(float *output_data, std::string file_path) {
size_t output_size;
auto ground_truth = reinterpret_cast<float *>(mindspore::lite::ReadFile(file_path.c_str(), &output_size));
size_t output_num = output_size / sizeof(float);
printf("output num : %zu\n", output_num);
CompareOutputData(output_data, ground_truth, output_num);
delete[] reinterpret_cast<char *>(ground_truth);  // buffer comes from ReadFile's new char[]
}
// std::string GetAndroidPackageName() {
// static std::string packageName;
//
// if (!packageName.empty()) {
// return packageName;
// }
//
// char cmdline[MAX_FILENAME_LEN] = {0};
// int fd = open("/proc/self/cmdline", O_RDONLY);
//
// if (fd >= 0) {
// char ch;
// int i = 0;
// while (read(fd, &ch, sizeof(ch)) > 0 && !isspace(ch)) {
// if (':' == ch) {
// break;
// }
//
// if (('/' == ch) || ('\\' == ch)) {
// (void)memset(cmdline, 0, sizeof(cmdline));
// i = 0;
// } else {
// cmdline[i] = ch;
// i++;
// }
// }
// close(fd);
// }
// packageName = std::string(cmdline);
// return packageName;
//}
// std::string GetAndroidPackagePath() {
// std::string packageName = GetAndroidPackageName();
// if (packageName.empty()) {
// return "./";
// }
// return "/data/data/" + packageName + '/';
//}
} // namespace lite
} // namespace mindspore
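A short usage sketch for the file helpers above, not part of this commit; the file names are hypothetical. ReadFile hands ownership of a new[]-allocated buffer to the caller:
void FileUtilsExample() {
  size_t size = 0;
  char *buf = mindspore::lite::ReadFile("input.bin", &size);
  if (buf == nullptr) {
    return;
  }
  (void)mindspore::lite::WriteToBin("copy.bin", buf, size);  // write the same bytes back out
  delete[] buf;                                              // caller owns the buffer
}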

View File

@ -0,0 +1,58 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_COMMON_FILE_UTILS_H_
#define MINDSPORE_LITE_COMMON_FILE_UTILS_H_
#include <cstdio>
#include <cstdlib>
#include <ctime>
#include <string>
#include <iostream>
#include <memory>
#include <fstream>
#include "src/common/utils.h"
#include "utils/log_adapter.h"
namespace mindspore {
namespace lite {
char *ReadFile(const char *file, size_t *size);
std::string RealPath(const char *path);
template <typename T>
void WriteToTxt(const std::string& file_path, void *data, size_t element_size) {
std::ofstream out_file;
out_file.open(file_path, std::ios::out);
auto real_data = reinterpret_cast<T *>(data);
for (size_t i = 0; i < element_size; i++) {
out_file << real_data[i] << " ";
}
out_file.close();
}
int WriteToBin(const std::string& file_path, void *data, size_t size);
int CompareOutputData(float *output_data, float *correct_data, int data_size);
void CompareOutput(float *output_data, std::string file_path);
std::string GetAndroidPackageName();
std::string GetAndroidPackagePath();
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_COMMON_FILE_UTILS_H_

View File

@ -0,0 +1,77 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <fstream>
#include <sstream>
#include <utility>
#include "src/common/graph_util.h"
#include "src/common/utils.h"
#include "utils/log_adapter.h"
#include "include/errorcode.h"
namespace mindspore {
namespace lite {
std::vector<size_t> GetGraphInputNodes(const schema::MetaGraph *meta_graph) {
MS_ASSERT(nullptr != meta_graph);
std::vector<size_t> ret;
for (size_t i = 0; i < meta_graph->inputIndex()->size(); i++) {
auto input_index = meta_graph->inputIndex()->GetAs<uint32_t>(i);
for (size_t j = 0; j < meta_graph->nodes()->size(); j++) {
auto *cNode = meta_graph->nodes()->GetAs<schema::CNode>(j);
MS_ASSERT(nullptr != cNode);
for (size_t k = 0; k < cNode->inputIndex()->size(); k++) {
if (cNode->inputIndex()->GetAs<uint32_t>(k) == input_index) {
ret.emplace_back(j);
break;
}
}
}
}
return ret;
}
std::vector<size_t> GetGraphOutputNodes(const schema::MetaGraph *meta_graph) {
MS_ASSERT(nullptr != meta_graph);
std::vector<size_t> ret;
for (size_t i = 0; i < meta_graph->outputIndex()->size(); i++) {
auto output_index = meta_graph->outputIndex()->GetAs<uint32_t>(i);
for (size_t j = 0; j < meta_graph->nodes()->size(); j++) {
auto *cNode = meta_graph->nodes()->GetAs<schema::CNode>(j);
MS_ASSERT(nullptr != cNode);
for (size_t k = 0; k < cNode->outputIndex()->size(); k++) {
if (cNode->outputIndex()->GetAs<uint32_t>(k) == output_index) {
ret.emplace_back(j);
break;
}
}
}
}
return ret;
}
// NODE_ID OpNode::ID() { return id; }
//
// void OpNode::AddInEdge(NODE_ID nodeId) { inEdges.insert(nodeId); }
//
// void OpNode::AddOutEdge(NODE_ID nodeId) { outEdges.insert(nodeId); }
//
// std::unordered_set<NODE_ID> OpNode::GetAllInEdges() { return inEdges; }
//
// std::unordered_set<NODE_ID> OpNode::GetAllOutEdges() { return outEdges; }
} // namespace lite
} // namespace mindspore
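A hedged usage sketch, not part of this commit: meta_graph is assumed to come from a flatbuffer-deserialized model, and the returned values are indices into meta_graph->nodes():
void GraphIoExample(const mindspore::schema::MetaGraph *meta_graph) {
  auto input_nodes = mindspore::lite::GetGraphInputNodes(meta_graph);    // indices of nodes that consume graph inputs
  auto output_nodes = mindspore::lite::GetGraphOutputNodes(meta_graph);  // indices of nodes that produce graph outputs
  MS_LOG(INFO) << "input nodes: " << input_nodes.size() << ", output nodes: " << output_nodes.size();
}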

View File

@ -0,0 +1,250 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_COMMON_GRAPH_UTIL_H_
#define MINDSPORE_LITE_COMMON_GRAPH_UTIL_H_
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include <memory>
#include "schema/model_generated.h"
#include "utils//log_adapter.h"
#include "include/errorcode.h"
namespace mindspore {
namespace lite {
using NODE_ID = std::string;
std::vector<size_t> GetGraphInputNodes(const schema::MetaGraph *meta_graph);
std::vector<size_t> GetGraphOutputNodes(const schema::MetaGraph *meta_graph);
class OpNode {
public:
explicit OpNode(const NODE_ID &nodeId) : id(nodeId) {}
NODE_ID ID() { return id; };
void AddInEdge(NODE_ID nodeId) { inEdges.insert(nodeId); }
void AddOutEdge(NODE_ID nodeId) { outEdges.insert(nodeId); }
std::unordered_set<NODE_ID> GetAllInEdges() { return inEdges; }
std::unordered_set<NODE_ID> GetAllOutEdges() { return outEdges; }
protected:
NODE_ID id;
std::unordered_set<NODE_ID> inEdges;
std::unordered_set<NODE_ID> outEdges;
};
template <typename NODE_T>
class OpGraph {
public:
OpGraph() {}
~OpGraph();
int Build(const schema::MetaGraph *subGraphDef);
NODE_T *GetNode(NODE_ID nodeId);
NODE_T *AddNode(NODE_ID nodeId);
std::unordered_set<NODE_T *> GetInputNode();
std::unordered_set<NODE_T *> GetOutputNode();
void AddNodes(std::vector<NODE_T *> addNodes);
void DeleteNodes(std::vector<NODE_T *> deleteNodes);
void AddEdge(NODE_ID nodeId);
int AddEdge(NODE_ID srcId, NODE_ID dstId);
int AddEdge(const schema::CNode *srcNodeDef, const flatbuffers::Vector<flatbuffers::Offset<schema::CNode>> *opDefs);
std::unordered_map<NODE_T *, std::unordered_set<NODE_T *>> GetDepends();
protected:
std::unordered_map<NODE_ID, NODE_T *> nodes;
};
template <typename NODE_T>
int OpGraph<NODE_T>::Build(const schema::MetaGraph *subGraphDef) {
if (subGraphDef == nullptr) {
// MS_LOGE("subGraphDef is nullptr");
return RET_ERROR;
}
auto opDefs = subGraphDef->nodes();
uint32_t opCount = opDefs->size();
for (uint32_t i = 0; i < opCount; i++) {
auto opDef = opDefs->GetAs<schema::CNode>(i);
auto node = AddNode(std::string(opDef->name()->c_str()));
if (node == nullptr) {
// MS_LOGE("add srcNode failed,name %s", opDef->name()->c_str());
return RET_ERROR;
}
auto ret = AddEdge(opDef, opDefs);
if (ret != RET_OK) {
// MS_LOGE("%s add edge failed. ret:%d", opDef->name()->c_str(), ret);
return RET_ERROR;
}
}
return RET_OK;
}
template <typename NODE_T>
int OpGraph<NODE_T>::AddEdge(const schema::CNode *srcNodeDef,
const flatbuffers::Vector<flatbuffers::Offset<schema::CNode>> *nodeDefs) {
MS_ASSERT(srcNodeDef != nullptr);
MS_ASSERT(nodeDefs != nullptr);
NODE_ID srcId = std::string(srcNodeDef->name()->c_str());
uint32_t opCount = nodeDefs->size();
// for single op condition
AddNode(srcId);
for (auto index : *(srcNodeDef->outputIndex())) {
for (uint32_t i = 0; i < opCount; i++) {
auto dstNodeDef = nodeDefs->GetAs<schema::CNode>(i);
bool find = false;
auto inputIndex = dstNodeDef->inputIndex();
if (std::any_of(inputIndex->begin(), inputIndex->end(), [&index](int i) { return i == index; })) {
find = true;
}
if (!find) {
continue;
}
NODE_ID dstId = std::string(dstNodeDef->name()->c_str());
auto ret = AddEdge(srcId, dstId);
if (ret != RET_OK) {
return ret;
}
}
}
return RET_OK;
}
template <typename NODE_T>
int OpGraph<NODE_T>::AddEdge(NODE_ID srcId, NODE_ID dstId) {
auto srcNode = AddNode(srcId);
if (srcNode == nullptr) {
// MS_LOGE("add srcNode failed");
return RET_ERROR;
}
auto dstNode = AddNode(dstId);
if (dstNode == nullptr) {
// MS_LOGE("add dstNode failed");
return RET_ERROR;
}
srcNode->AddOutEdge(dstNode);
dstNode->AddInEdge(srcNode);
return RET_OK;
}
template <typename NODE_T>
NODE_T *OpGraph<NODE_T>::GetNode(NODE_ID nodeId) {
auto node = nodes.find(nodeId);
if (node == nodes.end()) {
return nullptr;
}
return node->second;
}
template <typename NODE_T>
NODE_T *OpGraph<NODE_T>::AddNode(NODE_ID nodeId) {
auto node = GetNode(nodeId);
if (node != nullptr) {
return node;
}
node = new (std::nothrow) NODE_T(nodeId);
if (node == nullptr) {
// MS_LOGE("new node failed");
return nullptr;
}
nodes[nodeId] = node;
return node;
}
template <typename NODE_T>
void OpGraph<NODE_T>::AddNodes(std::vector<NODE_T *> addNodes) {
for (auto node : addNodes) {
if (node == nullptr) {
return;
}
nodes[node->ID()] = node;
}
}
template <typename NODE_T>
void OpGraph<NODE_T>::DeleteNodes(std::vector<NODE_T *> deleteNodes) {
for (auto deletenode : deleteNodes) {
if (deletenode == nullptr) {
continue;
}
auto node = GetNode(deletenode->ID());
if (node == nullptr) {
continue;
}
nodes.erase(deletenode->ID());
}
}
template <typename NODE_T>
std::unordered_set<NODE_T *> OpGraph<NODE_T>::GetInputNode() {
std::unordered_set<NODE_T *> inputNodes;
for (const auto &iter : nodes) {
auto node = iter.second;
if (node->GetAllInEdges().empty()) {
inputNodes.insert(node);
}
}
return inputNodes;
}
template <typename NODE_T>
std::unordered_set<NODE_T *> OpGraph<NODE_T>::GetOutputNode() {
std::unordered_set<NODE_T *> outputNodes;
for (const auto &iter : nodes) {
auto node = iter.second;
if (node->GetAllOutEdges().empty()) {
outputNodes.insert(node);
}
}
return outputNodes;
}
template <typename NODE_T>
std::unordered_map<NODE_T *, std::unordered_set<NODE_T *>> OpGraph<NODE_T>::GetDepends() {
std::unordered_map<NODE_T *, std::unordered_set<NODE_T *>> depends;
for (auto nodeIter : nodes) {
depends[nodeIter.second] = nodeIter.second->GetAllInEdges();
}
return depends;
}
template <typename NODE_T>
OpGraph<NODE_T>::~OpGraph() {
for (auto iter : nodes) {
delete iter.second;
}
nodes.clear();
}
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_COMMON_GRAPH_UTIL_H_
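A hedged sketch of the OpGraph template in use, not part of this commit. The template wires edges with node pointers, so the node type here (GraphNode, an assumption) stores pointer edges; OpNode above stores string IDs instead:
class GraphNode {
 public:
  explicit GraphNode(const mindspore::lite::NODE_ID &nodeId) : id_(nodeId) {}
  mindspore::lite::NODE_ID ID() { return id_; }
  void AddInEdge(GraphNode *node) { in_edges_.insert(node); }
  void AddOutEdge(GraphNode *node) { out_edges_.insert(node); }
  std::unordered_set<GraphNode *> GetAllInEdges() { return in_edges_; }
  std::unordered_set<GraphNode *> GetAllOutEdges() { return out_edges_; }

 private:
  mindspore::lite::NODE_ID id_;
  std::unordered_set<GraphNode *> in_edges_;
  std::unordered_set<GraphNode *> out_edges_;
};

void OpGraphExample(const mindspore::schema::MetaGraph *meta_graph) {
  mindspore::lite::OpGraph<GraphNode> graph;
  if (graph.Build(meta_graph) != mindspore::lite::RET_OK) {
    return;
  }
  auto entry_nodes = graph.GetInputNode();  // nodes with no in-edges
  auto depends = graph.GetDepends();        // node -> set of predecessor nodes
}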

View File

@ -0,0 +1,151 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ir/graph_utils.h"
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <stack>
#include <vector>
#include <list>
#include <string>
#include <fstream>
#include "ir/visitor.h"
#include "ir/func_graph.h"
#include "debug/label.h"
#include "utils/log_adapter.h"
#include "src/common/utils.h"
namespace mindspore {
namespace {
class DeepFirstSearcher {
public:
explicit DeepFirstSearcher(const IncludeFunc &include) : include_(include) {}
~DeepFirstSearcher() = default;
std::vector<AnfNodePtr> Search(const AnfNodePtr &root) {
if (root == nullptr) {
return res_;
}
seen_ = NewSeenGeneration();
Visit(root);
return res_;
}
void Visit(const AnfNodePtr &node) {
if (node == nullptr) {
return;
}
if (node->seen_ == seen_) {
return;
}
node->seen_ = seen_;
auto incl = include_(node);
if (incl == EXCLUDE) {
return;
}
if (filter_ == nullptr || !filter_(node)) {
res_.push_back(node);
}
if (incl == FOLLOW) {
if (node->isa<CNode>()) {
auto cnode = node->cast<CNodePtr>();
auto &inputs = cnode->inputs();
for (auto iter = inputs.rbegin(); iter != inputs.rend(); ++iter) {
Visit(*iter);
}
return;
}
}
}
private:
size_t seen_{0};
IncludeFunc include_;
FilterFunc filter_;
std::vector<AnfNodePtr> res_{};
};
class DeepScopedGraphSearcher : public DeepFirstSearcher {
public:
explicit DeepScopedGraphSearcher(const IncludeFunc &include) : DeepFirstSearcher(include) {}
~DeepScopedGraphSearcher() = default;
void Visit(const CNodePtr &cnode) { return; }
void Visit(const ValueNodePtr &vnode) {
if (!IsValueNode<FuncGraph>(vnode)) {
return;
}
auto graph = GetValueNode<FuncGraphPtr>(vnode);
AnfNodePtr ret = graph->get_return();
if (ret != nullptr) {
DeepFirstSearcher::Visit(ret);
}
}
void Visit(const ParameterPtr &param) {
if (param->func_graph() == nullptr) {
return;
}
AnfNodePtr ret = param->func_graph()->get_return();
if (ret != nullptr) {
DeepFirstSearcher::Visit(ret);
}
}
};
class DeepUsedGraphSearcher : public DeepFirstSearcher {
public:
explicit DeepUsedGraphSearcher(const IncludeFunc &include) : DeepFirstSearcher(include) {}
~DeepUsedGraphSearcher() = default;
void Visit(const CNodePtr &cnode) { return; }
void Visit(const ValueNodePtr &vnode) { return; }
};
class DeepLinkedGraphSearcher : public DeepFirstSearcher {
public:
explicit DeepLinkedGraphSearcher(const IncludeFunc &include) : DeepFirstSearcher(include) {}
~DeepLinkedGraphSearcher() = default;
void Visit(const CNodePtr &cnode) { return; }
void Visit(const ValueNodePtr &) {}
};
} // namespace
std::vector<AnfNodePtr> DeepScopedGraphSearch(const AnfNodePtr &root, const IncludeFunc &include) {
return DeepScopedGraphSearcher(include).Search(root);
}
std::vector<AnfNodePtr> DeepUsedGraphSearch(const AnfNodePtr &root, const IncludeFunc &include) {
return DeepUsedGraphSearcher(include).Search(root);
}
std::vector<AnfNodePtr> DeepLinkedGraphSearch(const AnfNodePtr &root, const IncludeFunc &include) {
return DeepLinkedGraphSearcher(include).Search(root);
}
} // namespace mindspore
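A hedged usage sketch of the searchers above, not part of this commit; the lambda follows every node, matching how the IncludeFunc result (FOLLOW/EXCLUDE from ir/graph_utils.h) is interpreted in DeepFirstSearcher::Visit:
std::vector<mindspore::AnfNodePtr> CollectAllNodes(const mindspore::FuncGraphPtr &graph) {
  auto follow_all = [](const mindspore::AnfNodePtr &) { return mindspore::FOLLOW; };
  return mindspore::DeepScopedGraphSearch(graph->get_return(), follow_all);
}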

View File

@ -0,0 +1,32 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_COMMON_OP_UTILS_H_
#define MINDSPORE_LITE_COMMON_OP_UTILS_H_
#include <functional>
#include <string>
#include "schema/model_generated.h"
namespace mindspore {
namespace lite {
inline schema::PrimitiveType GetOpType(const schema::CNode &opDef) { return opDef.primitive()->value_type(); }
inline std::string GetOpTypeName(const schema::CNode &opDef) { return schema::EnumNamePrimitiveType(GetOpType(opDef)); }
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_COMMON_OP_UTILS_H_

View File

@ -0,0 +1,262 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifdef __ANDROID__
#include <sys/auxv.h>
#endif
#include "src/common/utils.h"
namespace mindspore {
namespace lite {
std::vector<std::string> StringSplit(std::string str, const std::string& pattern) {
std::vector<std::string> result;
if (str.empty()) {
return result;
}
std::string::size_type pos;
str += pattern;
auto size = str.size();
for (size_t i = 0; i < size; i++) {
pos = str.find(pattern, i);
if (pos < size) {
std::string s = str.substr(i, pos - i);
result.push_back(s);
i = pos + pattern.size() - 1;
}
}
return result;
}
uint64_t GetTimeUs() {
struct timespec ts = {0, 0};
if (clock_gettime(CLOCK_MONOTONIC, &ts) != 0) {
return 0;
}
// seconds to microseconds plus nanoseconds to microseconds
uint64_t retval = static_cast<uint64_t>((ts.tv_sec * USEC) + (ts.tv_nsec / MSEC));
return retval;
}
static const unsigned int FP32_BIT_SIZE = 32;
static const unsigned int FP32_EXPONENT_BIAS = 127;
static const unsigned int FP32_SIGNIFICAND = 23;
static const unsigned int FP32_EXPONENT_MAX = 255;
static const unsigned int FP16_BIT_SIZE = 16;
static const unsigned int FP16_EXPONENT_BIAS = 15;
static const unsigned int FP16_SIGNIFICAND = 10;
static const int FP16_EXPONENT_MAX = 30;
static const int FP16_EXPONENT_MIN = -10;
// fp16.c
float ShortToFloat32(int16_t srcValue) {
uint16_t expHalf16 = srcValue & 0x7C00;
int exp1 = static_cast<int>(expHalf16);
uint16_t mantissa16 = srcValue & 0x03FF;
int mantissa1 = static_cast<int>(mantissa16);
int sign = static_cast<int>(srcValue & 0x8000);
sign = sign << FP16_BIT_SIZE;
// nan or inf
if (expHalf16 == 0x7C00) {
// nan
if (mantissa16 > 0) {
int res = (0x7FC00000 | sign);
int *iRes = &res;
auto fres = static_cast<float>(*iRes);
return fres;
}
// inf
int res = (0x7F800000 | sign);
int *iRes = &res;
auto fres = static_cast<float>(*iRes);
return fres;
}
if (expHalf16 != 0) {
exp1 += ((FP32_EXPONENT_BIAS - FP16_EXPONENT_BIAS) << FP16_SIGNIFICAND); // exponents converted to float32 bias
int res = (exp1 | mantissa1);
res = res << (FP32_SIGNIFICAND - FP16_SIGNIFICAND);
res = (res | sign);
int *iRes = &res;
auto fres = static_cast<float>(*iRes);
return fres;
}
int xmm1 = exp1 > (1 << FP16_SIGNIFICAND) ? exp1 : (1 << FP16_SIGNIFICAND);
xmm1 = (xmm1 << (FP32_SIGNIFICAND - FP16_SIGNIFICAND));
xmm1 += ((FP32_EXPONENT_BIAS - FP16_EXPONENT_BIAS - FP16_SIGNIFICAND)
<< FP32_SIGNIFICAND); // add the bias difference to xmm1
xmm1 = xmm1 | sign; // Combine with the sign mask
auto res = static_cast<float>(mantissa1); // Convert mantissa to float
int *ixmm1 = nullptr;
ixmm1 = &xmm1;
res *= static_cast<float>(*ixmm1);
return res;
}
// __gnu_f2h_ieee
int16_t Float32ToShort(float srcValue) {
float *psrcValue = nullptr;
psrcValue = &srcValue;
auto srcValueBit = static_cast<unsigned int>(*psrcValue);
int sign = srcValueBit >> (FP32_BIT_SIZE - 1);
int mantissa = srcValueBit & 0x007FFFFF;
// exponent
int exp = ((srcValueBit & 0x7F800000) >> FP32_SIGNIFICAND) + FP16_EXPONENT_BIAS - FP32_EXPONENT_BIAS;
int16_t res;
if (exp > 0 && exp < FP16_EXPONENT_MAX) {
// use rte rounding mode, round the significand, combine sign, exponent and significand into a short.
res = (sign << (FP16_BIT_SIZE - 1)) | (exp << FP16_SIGNIFICAND) |
((mantissa + 0x00001000) >> (FP32_SIGNIFICAND - FP16_SIGNIFICAND));
} else if (srcValueBit == 0) {
res = 0;
} else {
if (exp <= 0) {
if (exp < FP16_EXPONENT_MIN) {
// value is less than min half float point
res = 0;
} else {
// normalized single, magnitude is less than min normal half float point.
mantissa = (mantissa | 0x00800000) >> (1 - exp);
// round to nearest
if ((mantissa & 0x00001000) > 0) {
mantissa = mantissa + 0x00002000;
}
// combine sign & mantissa (exp is zero to get denormalized number)
res = (sign << FP16_EXPONENT_BIAS) | (mantissa >> (FP32_SIGNIFICAND - FP16_SIGNIFICAND));
}
} else if (exp == (FP32_EXPONENT_MAX - FP32_EXPONENT_BIAS + FP16_EXPONENT_BIAS)) {
if (mantissa == 0) {
// input float is infinity, return infinity half
res = (sign << FP16_EXPONENT_BIAS) | 0x7C00;
} else {
// input float is NaN, return half NaN
res = (sign << FP16_EXPONENT_BIAS) | 0x7C00 | (mantissa >> (FP32_SIGNIFICAND - FP16_SIGNIFICAND));
}
} else {
// exp > 0, normalized single, round to nearest
if ((mantissa & 0x00001000) > 0) {
mantissa = mantissa + 0x00002000;
if ((mantissa & 0x00800000) > 0) {
mantissa = 0;
exp = exp + 1;
}
}
if (exp > FP16_EXPONENT_MAX) {
// exponent overflow - return infinity half
res = (sign << FP16_EXPONENT_BIAS) | 0x7C00;
} else {
// combine sign, exp and mantissa into normalized half
res = (sign << FP16_EXPONENT_BIAS) | (exp << FP16_SIGNIFICAND) |
(mantissa >> (FP32_SIGNIFICAND - FP16_SIGNIFICAND));
}
}
}
return res;
}
std::string Remove(const std::string &from, const std::string &subStr, Mode mode) {
std::string result = from;
if (mode == PREFIX) {
if (from.substr(0, subStr.length()) == subStr) {
result = from.substr(subStr.size());
}
} else if (mode == SUFFIX) {
if (from.rfind(subStr) == from.size() - subStr.size()) {
result = from.substr(0, from.size() - subStr.size());
}
} else {
size_t index;
while ((index = result.find(subStr)) != std::string::npos) {
result = result.erase(index, subStr.size());
}
}
return result;
}
std::vector<std::string> StrSplit(const std::string &str, const std::string &pattern) {
std::string::size_type pos;
std::vector<std::string> result;
std::string tmpStr(str + pattern);
std::string::size_type size = tmpStr.size();
for (std::string::size_type i = 0; i < size; i++) {
pos = tmpStr.find(pattern, i);
if (pos < size) {
std::string s = tmpStr.substr(i, pos - i);
result.push_back(s);
i = pos + pattern.size() - 1;
}
}
return result;
}
std::vector<std::string> Tokenize(const std::string &src, const std::string &delimiters,
const Option<size_t> &maxTokenNum) {
if (maxTokenNum.IsSome() && maxTokenNum.Get() == 0) {
return {};
}
std::vector<std::string> tokens;
size_t offset = 0;
while (true) {
size_t nonDelimiter = src.find_first_not_of(delimiters, offset);
if (nonDelimiter == std::string::npos) {
break;
}
size_t delimiter = src.find_first_of(delimiters, nonDelimiter);
if (delimiter == std::string::npos || (maxTokenNum.IsSome() && tokens.size() == maxTokenNum.Get() - 1)) {
tokens.push_back(src.substr(nonDelimiter));
break;
}
tokens.push_back(src.substr(nonDelimiter, delimiter - nonDelimiter));
offset = delimiter;
}
return tokens;
}
void ShortToFloat32(const int16_t *srcdata, float *dstdata, size_t elementSize) {
MS_ASSERT(srcdata != nullptr);
MS_ASSERT(dstdata != nullptr);
for (size_t i = 0; i < elementSize; i++) {
dstdata[i] = ShortToFloat32(srcdata[i]);
}
}
void Float32ToShort(const float *srcdata, int16_t *dstdata, size_t elementSize) {
MS_ASSERT(srcdata != nullptr);
MS_ASSERT(dstdata != nullptr);
for (size_t i = 0; i < elementSize; i++) {
dstdata[i] = Float32ToShort(srcdata[i]);
}
}
#if defined(__ANDROID__)
uint32_t getHwCap(int hwcap_type) {
uint32_t ret = getauxval(hwcap_type);
return ret;
}
#endif
} // namespace lite
} // namespace mindspore
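A short round-trip sketch for the helpers above, not part of this commit; the values are illustrative only:
void UtilsExample() {
  float values[4] = {0.5f, -1.25f, 3.0f, 0.0f};
  int16_t halves[4] = {0};
  float restored[4] = {0};
  mindspore::lite::Float32ToShort(values, halves, 4);    // fp32 -> fp16 bit patterns
  mindspore::lite::ShortToFloat32(halves, restored, 4);  // fp16 bit patterns -> fp32
  auto pieces = mindspore::lite::StringSplit("conv1,relu1,pool1", ",");  // {"conv1", "relu1", "pool1"}
}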

View File

@ -0,0 +1,193 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_COMMON_UTILS_H_
#define MINDSPORE_LITE_COMMON_UTILS_H_
#include <stdint.h>
#include <ctime>
#include <cstdint>
#include <vector>
#include <set>
#include <string>
#include <utility>
#include <sstream>
#include "utils/log_adapter.h"
#include "tools/common/option.h"
#include "include/errorcode.h"
namespace mindspore {
namespace lite {
const int USEC = 1000000;
const int MSEC = 1000;
std::vector<std::string> StringSplit(std::string str, const std::string& pattern);
uint64_t GetTimeUs(void);
int16_t Float32ToShort(float srcValue);
float ShortToFloat32(int16_t srcValue);
void ShortToFloat32(const int16_t *srcdata, float *dstdata, size_t elementSize);
void Float32ToShort(const float *srcdata, int16_t *dstdata, size_t elementSize);
#if defined(__arm__) || defined(__aarch64__)
uint32_t getHwCap(int hwcap_type);
#endif
template <typename T>
bool IsContain(const std::vector<T> &vec, T element) {
for (auto iter = vec.begin(); iter != vec.end(); iter++) {
if (*iter == element) {
return true;
}
}
return false;
}
template <typename T>
bool VectorErase(std::vector<T> *vec, T element) {
bool ret = false;
for (auto iter = vec->begin(); iter != vec->end();) {
if (*iter == element) {
iter = vec->erase(iter);
ret = true;
} else {
iter++;
}
}
return ret;
}
template <typename T>
bool VectorReplace(std::vector<T> *vec, T srcElement, T dstElement) {
bool ret = false;
for (auto iter = vec->begin(); iter != vec->end(); iter++) {
if (*iter == srcElement) {
if (!IsContain(*vec, dstElement)) {
*iter = std::move(dstElement);
} else {
vec->erase(iter);
}
ret = true;
break;
}
}
return ret;
}
const char WHITESPACE[] = "\t\n\v\f\r ";
const char STR_TRUE[] = "true";
const char STR_FALSE[] = "false";
template <typename T>
Option<std::string> ToString(T t) {
std::ostringstream out;
out << t;
if (!out.good()) {
return Option<std::string>(None());
}
return Option<std::string>(out.str());
}
template <>
inline Option<std::string> ToString(bool value) {
return value ? Option<std::string>(STR_TRUE) : Option<std::string>(STR_FALSE);
}
// get the file name from a given path
// for example: "/usr/bin", we will get "bin"
inline std::string GetFileName(const std::string &path) {
char delim = '/';
size_t i = path.rfind(delim, path.length());
if (i != std::string::npos) {
return (path.substr(i + 1, path.length() - i));
}
return "";
}
// trim the white space character in a string
// see also: macro WHITESPACE defined above
inline void Trim(std::string *input) {
if (input == nullptr) {
return;
}
if (input->empty()) {
return;
}
input->erase(0, input->find_first_not_of(WHITESPACE));
input->erase(input->find_last_not_of(WHITESPACE) + 1);
}
// to judge whether a string is starting with prefix
// for example: "hello world" is starting with "hello"
inline bool StartsWithPrefix(const std::string &source, const std::string &prefix) {
if (source.length() < prefix.length()) {
return false;
}
return (source.compare(0, prefix.length(), prefix) == 0);
}
// split string
std::vector<std::string> StrSplit(const std::string &str, const std::string &pattern);
// tokenize string
std::vector<std::string> Tokenize(const std::string &src, const std::string &delimiters,
const Option<size_t> &maxTokenNum = Option<size_t>(None()));
enum Mode { PREFIX, SUFFIX, ANY };
// remove redundant characters
std::string Remove(const std::string &from, const std::string &subStr, Mode mode = ANY);
template <typename T>
inline Option<T> GenericParseValue(const std::string &value) {
T ret;
std::istringstream input(value);
input >> ret;
if (input && input.eof()) {
return Option<T>(ret);
}
return Option<T>(None());
}
template <>
inline Option<std::string> GenericParseValue(const std::string &value) {
return Option<std::string>(value);
}
template <>
inline Option<bool> GenericParseValue(const std::string &value) {
if (value == "true") {
return Option<bool>(true);
} else if (value == "false") {
return Option<bool>(false);
}
return Option<bool>(None());
}
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_COMMON_UTILS_H_
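A hedged sketch of the parsing helpers declared above, not part of this commit:
void ParseExample() {
  auto num = mindspore::lite::GenericParseValue<int>("42");      // Option<int> holding 42
  auto flag = mindspore::lite::GenericParseValue<bool>("true");  // Option<bool> holding true
  auto tokens = mindspore::lite::Tokenize("a  b c", " ");        // {"a", "b", "c"}
  if (num.IsSome() && flag.IsSome()) {
    MS_LOG(INFO) << "parsed " << num.Get() << " and " << flag.Get() << ", tokens: " << tokens.size();
  }
}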

View File

@ -0,0 +1,31 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "include/context.h"
#include "src/runtime/allocator.h"
namespace mindspore::lite {
Context::Context() { allocator = Allocator::Create(); }
Context::~Context() = default;
Context::Context(int threadNum, std::shared_ptr<Allocator> allocator, DeviceContext deviceCtx) {
this->allocator = std::move(allocator);
this->threadNum = threadNum;
this->deviceCtx = std::move(deviceCtx);
}
} // namespace mindspore::lite
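A hedged construction sketch, not part of this commit; DeviceContext is assumed to be default-constructible since its definition lives in include/context.h:
void ContextExample() {
  mindspore::lite::DeviceContext device_ctx;               // assumed default-constructible
  auto allocator = mindspore::lite::Allocator::Create();   // same factory the default constructor uses
  mindspore::lite::Context ctx(2, allocator, device_ctx);  // two worker threads
}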

View File

@ -0,0 +1,124 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "mindspore/lite/src/executor.h"
#include "src/runtime/kernel/arm/opclib/pack.h"
#include "include/errorcode.h"
namespace mindspore::lite {
int Executor::Run(std::vector<tensor::Tensor *> &inputs, std::vector<tensor::Tensor *> &outputs,
std::vector<kernel::LiteKernel *> &kernels, Allocator *allocator,
const kernel::KernelCallBack &before, const kernel::KernelCallBack &after) {
MS_ASSERT(nullptr != allocator);
for (auto &inTensor : inputs) {
if (inTensor == nullptr) {
MS_LOG(ERROR) << "Graph input tensor is nullptr";
return RET_ERROR;
}
if (inTensor->GetFormat() != schema::Format_NHWC) {
MS_LOG(ERROR) << "Model input tensor should be NHWC";
return RET_ERROR;
}
}
kernel::LiteKernelUtil::InitTensorRefCount(kernels);
for (auto *kernel : kernels) {
MS_ASSERT(nullptr != kernel);
auto &outputs = kernel->GetOutputs();
for (auto *output : outputs) {
MS_ASSERT(nullptr != output);
output->MallocData(allocator);
}
kernel::CallBackParam callbackParam;
callbackParam.name_callback_aram = kernel->Name();
if (before != nullptr) {
if (!before(kernel->GetInputs(), kernel->GetOutputs(), callbackParam)) {
MS_LOG(ERROR) << "run kernel before_callback failed, name: " << kernel->Name();
}
}
auto ret = kernel->Run();
if (0 != ret) {
MS_LOG(ERROR) << "run kernel failed, name: " << kernel->Name();
return ret;
}
if (after != nullptr) {
if (!after(kernel->GetInputs(), kernel->GetOutputs(), callbackParam)) {
MS_LOG(ERROR) << "run kernel after_callback failed, name: " << kernel->Name();
}
}
for (auto input_kernel : kernel->GetInKernels()) {
MS_EXCEPTION_IF_NULL(input_kernel);
ret = input_kernel->DecOutTensorRefCount(allocator);
if (0 != ret) {
MS_LOG(WARNING) << "DecOutTensorRefCount for kernel" << kernel->Name() << " failed";
}
}
}
return RET_OK;
}
int Executor::TransformTensorLayout(tensor::Tensor *tensor, schema::Format dst_format, Allocator *allocator) {
MS_ASSERT(nullptr != tensor);
MS_ASSERT(nullptr != allocator);
MS_ASSERT(4 == tensor->shape().size());
auto data_type = tensor->data_type();
switch (data_type) {
case kNumberTypeInt8:
return TransformTensorLayoutUint8(tensor, dst_format, allocator);
case kNumberTypeFloat32:
return TransformTensorLayoutFp32(tensor, dst_format, allocator);
}
return RET_OK;
}
int Executor::TransformTensorLayoutFp32(tensor::Tensor *tensor, schema::Format dst_format, Allocator *allocator) {
MS_ASSERT(nullptr != tensor);
MS_ASSERT(nullptr != allocator);
MS_ASSERT(4 == tensor->shape().size());
auto src_format = tensor->GetFormat();
if (src_format == schema::Format_NC4HW4 && dst_format == schema::Format_NHWC) {
auto *src_data = tensor->Data();
auto *dst_data = allocator->Malloc(tensor->Size());
if (dst_data == nullptr) {
MS_LOG(ERROR) << "Malloc data failed";
return RET_ERROR;
}
PackNC4HW4ToNHWCFp32(src_data, dst_data, tensor->Batch(), tensor->Height() * tensor->Width(), tensor->Channel());
tensor->SetData(dst_data);
tensor->SetFormat(dst_format);
allocator->Free(src_data);
return RET_OK;
} else {
MS_LOG(ERROR) << "Unsupport layout transform: " << schema::EnumNameFormat(tensor->GetFormat()) << " to "
<< schema::EnumNameFormat(dst_format) << " in float32";
return RET_ERROR;
}
}
int Executor::TransformTensorLayoutUint8(tensor::Tensor *tensor, schema::Format dst_format, Allocator *allocator) {
MS_ASSERT(nullptr != tensor);
MS_ASSERT(nullptr != allocator);
MS_ASSERT(4 == tensor->shape().size());
// auto src_format = tensor->GetFormat();
// todo
MS_LOG(ERROR) << "Unsupport layout transform: " << schema::EnumNameFormat(tensor->GetFormat()) << " to "
<< schema::EnumNameFormat(dst_format) << " in uint8";
return RET_ERROR;
}
} // namespace mindspore::lite
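A hedged sketch of driving Executor::Run, not part of this commit; before/after hooks of type kernel::KernelCallBack can replace the nullptr arguments:
int RunGraph(std::vector<mindspore::tensor::Tensor *> &inputs,
             std::vector<mindspore::tensor::Tensor *> &outputs,
             std::vector<mindspore::kernel::LiteKernel *> &kernels,
             mindspore::lite::Allocator *allocator) {
  mindspore::lite::Executor executor;
  // pass kernel::KernelCallBack lambdas instead of the nullptrs to trace or time each kernel
  return executor.Run(inputs, outputs, kernels, allocator, nullptr, nullptr);
}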

View File

@ -0,0 +1,48 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_SRC_EXECUTOR_H_
#define MINDSPORE_LITE_SRC_EXECUTOR_H_
#include <vector>
#include "src/runtime/allocator.h"
#include "src/lite_kernel.h"
namespace mindspore::lite {
class Executor {
public:
Executor() = default;
int Prepare(std::vector<kernel::LiteKernel *> &kernels) { return 0; }
int Run(std::vector<tensor::Tensor *> &inputs, std::vector<tensor::Tensor *> &outputs,
std::vector<kernel::LiteKernel *> &kernels, Allocator *allocator = nullptr,
const kernel::KernelCallBack &before = nullptr, const kernel::KernelCallBack &after = nullptr);
protected:
int TransformTensorLayoutFp32(tensor::Tensor *tensor, schema::Format dst_format, Allocator *allocator = nullptr);
int TransformTensorLayoutUint8(tensor::Tensor *tensor, schema::Format dst_format, Allocator *allocator = nullptr);
int TransformTensorLayout(tensor::Tensor *tensor, schema::Format dst_format, Allocator *allocator = nullptr);
protected:
Context *context = nullptr;
};
} // namespace mindspore::lite
#endif  // MINDSPORE_LITE_SRC_EXECUTOR_H_

View File

@ -0,0 +1,68 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/gllo/common/node_pass.h"
#include <unordered_set>
#include <deque>
#include <algorithm>
#include "ir/anf.h"
#include "ir/func_graph.h"
#include "ir/manager.h"
namespace mindspore {
namespace opt {
bool NodePass::Run(const FuncGraphPtr &func_graph) {
MS_EXCEPTION_IF_NULL(func_graph);
FuncGraphManagerPtr manager = func_graph->manager();
MS_EXCEPTION_IF_NULL(manager);
manager->AddFuncGraph(func_graph);
std::unordered_set<AnfNodePtr> seen_node;
std::deque<AnfNodePtr> todo{func_graph->output()};
bool changes = false;
while (!todo.empty()) {
AnfNodePtr node = todo.front();
todo.pop_front();
if (seen_node.count(node) > 0 || !manager->all_nodes().contains(node)) {
continue;
}
(void)seen_node.insert(node);
AnfNodePtr new_node = Run(func_graph, node);
bool change = (new_node != nullptr);
if (new_node != nullptr && new_node != node) {
(void)manager->Replace(node, new_node);
(void)seen_node.erase(node);
} else if (new_node == nullptr) {
new_node = node;
}
if (new_node && IsValueNode<FuncGraph>(new_node)) {
auto const_func_graph = GetValueNode<FuncGraphPtr>(new_node);
MS_EXCEPTION_IF_NULL(const_func_graph);
todo.push_back(const_func_graph->output());
} else if (new_node && new_node->isa<CNode>()) {
auto cnode = new_node->cast<CNodePtr>();
MS_EXCEPTION_IF_NULL(cnode);
auto inputs = cnode->inputs();
(void)todo.insert(todo.end(), inputs.begin(), inputs.end());
}
changes = changes || change;
}
return changes;
}
} // namespace opt
} // namespace mindspore

View File

@ -0,0 +1,36 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_SRC_PASS_COMMON_NODE_PASS_H_
#define MINDSPORE_LITE_SRC_PASS_COMMON_NODE_PASS_H_
#include <string>
#include <memory>
#include "src/gllo/common/pass.h"
namespace mindspore {
namespace opt {
// @brief ANF Node level optimization base pass
class NodePass : public Pass {
public:
explicit NodePass(const std::string &name) : Pass(name) {}
~NodePass() override = default;
bool Run(const FuncGraphPtr &func_graph) final;
virtual AnfNodePtr Run(const FuncGraphPtr &func_graph, const AnfNodePtr &node) = 0;
};
using NodePassPtr = std::shared_ptr<NodePass>;
} // namespace opt
} // namespace mindspore
#endif // MINDSPORE_LITE_SRC_PASS_COMMON_NODE_PASS_H_
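A hedged sketch of a concrete pass built on the interface above, not part of this commit; the pass name string is arbitrary:
namespace mindspore {
namespace opt {
class NoopNodePass : public NodePass {
 public:
  NoopNodePass() : NodePass("noop_node_pass") {}
  ~NoopNodePass() override = default;
  AnfNodePtr Run(const FuncGraphPtr &func_graph, const AnfNodePtr &node) override {
    // return a replacement node to rewrite, the node itself to record a change without
    // rewriting, or nullptr to leave the node untouched (see NodePass::Run in node_pass.cc)
    return nullptr;
  }
};
}  // namespace opt
}  // namespace mindspore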

Some files were not shown because too many files have changed in this diff.