!4873 [lite] add option to build full minddata

Merge pull request !4873 from 章一智/TRC-ms-lite
mindspore-ci-bot 2020-08-29 04:04:10 +08:00 committed by Gitee
commit bbbee68556
35 changed files with 530 additions and 73 deletions

View File

@ -26,7 +26,7 @@ usage()
echo "bash build.sh [-d] [-r] [-v] [-c on|off] [-t on|off] [-g on|off] [-h] [-b ge] [-m infer|train] \\"
echo " [-a on|off] [-Q on|off] [-p on|off] [-i] [-L] [-R] [-D on|off] [-j[n]] [-e gpu|d|cpu] \\"
echo " [-P on|off] [-z [on|off]] [-M on|off] [-V 9.2|10.1] [-I arm64|arm32|x86_64] [-K] \\"
echo " [-B on|off] [-w on|off] [-E] [-l on|off] [-n]"
echo " [-B on|off] [-w on|off] [-E] [-l on|off] [-n full|lite|off]"
echo ""
echo "Options:"
echo " -d Debug mode"
@ -50,10 +50,10 @@ usage()
echo " -Q Enable dump memory, default off"
echo " -D Enable dumping of function graph ir, default on"
echo " -z Compile dataset & mindrecord, default on"
echo " -n Compile minddata lite"
echo " -n Compile minddata with mindspore-lite, available: off, lite, full, default is lite"
echo " -M Enable MPI and NCCL for GPU training, gpu default on"
echo " -V Specify the minimum required cuda version, default CUDA 10.1"
echo " -I Compile lite"
echo " -I Compile mindspore-lite"
echo " -K Compile with AKG, default on"
echo " -s Enable serving module, default off"
echo " -w Enable acl module, default off"
@ -93,7 +93,7 @@ checkopts()
ENABLE_DUMPE2E="off"
ENABLE_DUMP_IR="on"
COMPILE_MINDDATA="on"
COMPILE_MINDDATA_LITE="off"
COMPILE_MINDDATA_LITE="lite"
ENABLE_MPI="off"
CUDA_VERSION="10.1"
COMPILE_LITE="off"
@ -109,7 +109,7 @@ checkopts()
ENABLE_GPU="off"
# Process the options
while getopts 'drvj:c:t:hsb:a:g:p:ie:m:l:I:LRP:Q:D:zM:V:K:swB:EnT:' opt
while getopts 'drvj:c:t:hsb:a:g:p:ie:m:l:I:LRP:Q:D:zM:V:K:swB:En:T:' opt
do
OPTARG=$(echo ${OPTARG} | tr '[A-Z]' '[a-z]')
case "${opt}" in
@ -117,7 +117,13 @@ checkopts()
DEBUG_MODE="on"
;;
n)
COMPILE_MINDDATA_LITE="on"
if [[ "X$OPTARG" == "Xoff" || "X$OPTARG" == "Xlite" || "X$OPTARG" == "Xfull" ]]; then
COMPILE_MINDDATA_LITE="$OPTARG"
else
echo "Invalid value ${OPTARG} for option -n"
usage
exit 1
fi
;;
r)
DEBUG_MODE="off"
@ -577,7 +583,7 @@ build_lite()
build_flatbuffer
build_gtest
if [ "${COMPILE_MINDDATA_LITE}" == "on" ]; then
if [ "${COMPILE_MINDDATA_LITE}" == "lite" ] || [ "${COMPILE_MINDDATA_LITE}" == "full" ]; then
build_minddata_lite_deps
fi
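
The usage text and getopts changes above turn -n from a plain switch into a three-way value. A hedged sketch of how the option might be combined with the lite build flag -I; other flags and their defaults come from parts of build.sh not shown in this diff, so treat these commands as illustrative only:

    # Default: minddata is built in its reduced "lite" form
    bash build.sh -I arm64 -j8
    # New in this change: compile the full minddata pipeline for mindspore-lite
    bash build.sh -I arm64 -n full -j8
    # Skip minddata entirely
    bash build.sh -I x86_64 -n off -j8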

View File

@ -13,24 +13,30 @@ set(TURBO_DIR_RUN_X86 ${MAIN_DIR}-${RUN_X86_COMPONENT_NAME}/third_party/libjpeg-
set(OPENCV_DIR_RUN_X86 ${MAIN_DIR}-${RUN_X86_COMPONENT_NAME}/third_party/opencv)
set(PROTOBF_DIR_RUN_X86 ${MAIN_DIR}-${RUN_X86_COMPONENT_NAME}/third_party/protobuf)
set(FLATBF_DIR_RUN_X86 ${MAIN_DIR}-${RUN_X86_COMPONENT_NAME}/third_party/flatbuffers)
if (BUILD_MINDDATA)
install(DIRECTORY ${TOP_DIR}/mindspore/ccsrc/minddata/dataset/include/ DESTINATION ${INC_DIR} COMPONENT ${COMPONENT_NAME} FILES_MATCHING PATTERN "*.h")
install(FILES ${TOP_DIR}/mindspore/lite/build/minddata/libminddata-lite.so DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
if (BUILD_MINDDATA STREQUAL "lite")
install(DIRECTORY ${TOP_DIR}/mindspore/ccsrc/minddata/dataset/include/ DESTINATION ${INC_DIR} COMPONENT ${COMPONENT_NAME} FILES_MATCHING PATTERN "*.h")
if (PLATFORM_ARM64)
install(FILES ${TOP_DIR}/mindspore/lite/build/minddata/libminddata-lite.so DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/libjpeg-turbo/lib/libjpeg.so DESTINATION ${TURBO_DIR}/lib COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/libjpeg-turbo/lib/libturbojpeg.so DESTINATION ${TURBO_DIR}/lib COMPONENT ${COMPONENT_NAME})
if (PLATFORM_ARM64)
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/arm64-v8a/libopencv_core.so DESTINATION ${OPENCV_DIR}/lib/arm64-v8a COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/arm64-v8a/libopencv_imgcodecs.so DESTINATION ${OPENCV_DIR}/lib/arm64-v8a COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/arm64-v8a/libopencv_imgproc.so DESTINATION ${OPENCV_DIR}/lib/arm64-v8a COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/arm64-v8a/libopencv_core.so DESTINATION ${OPENCV_DIR}/lib COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/arm64-v8a/libopencv_imgcodecs.so DESTINATION ${OPENCV_DIR}/lib COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/arm64-v8a/libopencv_imgproc.so DESTINATION ${OPENCV_DIR}/lib COMPONENT ${COMPONENT_NAME})
elseif (PLATFORM_ARM32)
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/armeabi-v7a/libopencv_core.so DESTINATION ${OPENCV_DIR}/lib/armeabi-v7a COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/armeabi-v7a/libopencv_imgcodecs.so DESTINATION ${OPENCV_DIR}/lib/armeabi-v7a COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/armeabi-v7a/libopencv_imgproc.so DESTINATION ${OPENCV_DIR}/lib/armeabi-v7a COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/mindspore/lite/build/minddata/libminddata-lite.so DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/libjpeg-turbo/lib/libjpeg.so DESTINATION ${TURBO_DIR}/lib COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/libjpeg-turbo/lib/libturbojpeg.so DESTINATION ${TURBO_DIR}/lib COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/armeabi-v7a/libopencv_core.so DESTINATION ${OPENCV_DIR}/lib COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/armeabi-v7a/libopencv_imgcodecs.so DESTINATION ${OPENCV_DIR}/lib COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/armeabi-v7a/libopencv_imgproc.so DESTINATION ${OPENCV_DIR}/lib COMPONENT ${COMPONENT_NAME})
else ()
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/libopencv_core.so DESTINATION ${OPENCV_DIR}/lib COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/libopencv_imgcodecs.so DESTINATION ${OPENCV_DIR}/lib COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/libopencv_imgproc.so DESTINATION ${OPENCV_DIR}/lib COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/mindspore/lite/build/minddata/libminddata-lite.so DESTINATION ${LIB_DIR_RUN_X86} COMPONENT ${RUN_X86_COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/libjpeg-turbo/lib/libjpeg.so.62.3.0 DESTINATION ${TURBO_DIR_RUN_X86}/lib RENAME libjpeg.so.62 COMPONENT ${RUN_X86_COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/libjpeg-turbo/lib/libturbojpeg.so.0.2.0 DESTINATION ${TURBO_DIR_RUN_X86}/lib RENAME libturbojpeg.so.0 COMPONENT ${RUN_X86_COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/libopencv_core.so.4.2.0 DESTINATION ${OPENCV_DIR_RUN_X86}/lib RENAME libopencv_core.so.4.2 COMPONENT ${RUN_X86_COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/libopencv_imgcodecs.so.4.2.0 DESTINATION ${OPENCV_DIR_RUN_X86}/lib RENAME libopencv_imgcodecs.so.4.2 COMPONENT ${RUN_X86_COMPONENT_NAME})
install(FILES ${TOP_DIR}/third_party/opencv/build/lib/libopencv_imgproc.so.4.2.0 DESTINATION ${OPENCV_DIR_RUN_X86}/lib RENAME libopencv_imgproc.so.4.2 COMPONENT ${RUN_X86_COMPONENT_NAME})
endif ()
endif ()

View File

@ -32,7 +32,9 @@
#include "minddata/dataset/engine/datasetops/source/mnist_op.h"
#include "minddata/dataset/engine/datasetops/source/random_data_op.h"
#include "minddata/dataset/engine/datasetops/source/text_file_op.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/datasetops/source/voc_op.h"
#endif
// Dataset operator headers (in alphabetical order)
#include "minddata/dataset/engine/datasetops/batch_op.h"
#include "minddata/dataset/engine/datasetops/build_vocab_op.h"
@ -200,6 +202,7 @@ std::shared_ptr<ImageFolderDataset> ImageFolder(const std::string &dataset_dir,
return ds->ValidateParams() ? ds : nullptr;
}
#ifndef ENABLE_ANDROID
// Function to create a ManifestDataset.
std::shared_ptr<ManifestDataset> Manifest(const std::string &dataset_file, const std::string &usage,
const std::shared_ptr<SamplerObj> &sampler,
@ -209,6 +212,7 @@ std::shared_ptr<ManifestDataset> Manifest(const std::string &dataset_file, const
// Call derived class validation method.
return ds->ValidateParams() ? ds : nullptr;
}
#endif
// Function to create a MnistDataset.
std::shared_ptr<MnistDataset> Mnist(const std::string &dataset_dir, const std::shared_ptr<SamplerObj> &sampler) {
@ -236,6 +240,7 @@ std::shared_ptr<TextFileDataset> TextFile(const std::vector<std::string> &datase
return ds->ValidateParams() ? ds : nullptr;
}
#ifndef ENABLE_ANDROID
// Function to create a VOCDataset.
std::shared_ptr<VOCDataset> VOC(const std::string &dataset_dir, const std::string &task, const std::string &mode,
const std::map<std::string, int32_t> &class_indexing, bool decode,
@ -245,6 +250,7 @@ std::shared_ptr<VOCDataset> VOC(const std::string &dataset_dir, const std::strin
// Call derived class validation method.
return ds->ValidateParams() ? ds : nullptr;
}
#endif
// Function to create a ZipDataset.
std::shared_ptr<ZipDataset> Zip(const std::vector<std::shared_ptr<Dataset>> &datasets) {
@ -274,6 +280,7 @@ std::shared_ptr<BatchDataset> Dataset::Batch(int32_t batch_size, bool drop_remai
return ds;
}
#ifndef ENABLE_ANDROID
// Function to create a Vocab from dataset
std::shared_ptr<Vocab> Dataset::BuildVocab(const std::vector<std::string> &columns,
const std::pair<int64_t, int64_t> &freq_range, int64_t top_k,
@ -304,6 +311,7 @@ std::shared_ptr<Vocab> Dataset::BuildVocab(const std::vector<std::string> &colum
return vocab;
}
#endif
// Function to create a Concat dataset
std::shared_ptr<ConcatDataset> Dataset::Concat(const std::vector<std::shared_ptr<Dataset>> &datasets) {
@ -1266,6 +1274,7 @@ std::vector<std::shared_ptr<DatasetOp>> ImageFolderDataset::Build() {
return node_ops;
}
#ifndef ENABLE_ANDROID
ManifestDataset::ManifestDataset(const std::string &dataset_file, const std::string &usage,
const std::shared_ptr<SamplerObj> &sampler,
const std::map<std::string, int32_t> &class_indexing, bool decode)
@ -1310,6 +1319,7 @@ std::vector<std::shared_ptr<DatasetOp>> ManifestDataset::Build() {
node_ops.push_back(manifest_op);
return node_ops;
}
#endif
MnistDataset::MnistDataset(std::string dataset_dir, std::shared_ptr<SamplerObj> sampler)
: dataset_dir_(dataset_dir), sampler_(sampler) {}
@ -1465,6 +1475,7 @@ std::vector<std::shared_ptr<DatasetOp>> TextFileDataset::Build() {
return node_ops;
}
#ifndef ENABLE_ANDROID
// Constructor for VOCDataset
VOCDataset::VOCDataset(const std::string &dataset_dir, const std::string &task, const std::string &mode,
const std::map<std::string, int32_t> &class_indexing, bool decode,
@ -1542,6 +1553,7 @@ std::vector<std::shared_ptr<DatasetOp>> VOCDataset::Build() {
node_ops.push_back(voc_op);
return node_ops;
}
#endif
// DERIVED DATASET CLASSES LEAF-NODE DATASETS
// (In alphabetical order)
@ -1578,6 +1590,7 @@ bool BatchDataset::ValidateParams() {
return true;
}
#ifndef ENABLE_ANDROID
BuildVocabDataset::BuildVocabDataset(std::shared_ptr<Vocab> vocab, const std::vector<std::string> &columns,
const std::pair<int64_t, int64_t> &freq_range, int64_t top_k,
const std::vector<std::string> &special_tokens, bool special_first)
@ -1616,6 +1629,7 @@ bool BuildVocabDataset::ValidateParams() {
}
return true;
}
#endif
// Function to build ConcatOp
ConcatDataset::ConcatDataset(const std::vector<std::shared_ptr<Dataset>> &datasets) : datasets_(datasets) {

View File

@ -42,7 +42,7 @@ MSTensor *DETensor::CreateFromMemory(TypeId data_type, const std::vector<int> &s
std::transform(shape.begin(), shape.end(), std::back_inserter(t_shape),
[](int s) -> dataset::dsize_t { return static_cast<dataset::dsize_t>(s); });
(void)dataset::Tensor::CreateFromMemory(dataset::TensorShape(t_shape), MSTypeToDEType(data_type),
(void)dataset::Tensor::CreateFromMemory(dataset::TensorShape(t_shape), dataset::MSTypeToDEType(data_type),
static_cast<uchar *>(data), &t);
return new DETensor(std::move(t));
}

View File

@ -370,6 +370,7 @@ Status ManifestOp::CountDatasetInfo() {
return Status::OK();
}
#ifdef ENABLE_PYTHON
Status ManifestOp::CountTotalRows(const std::string &file, const py::dict &dict, const std::string &usage,
int64_t *count, int64_t *numClasses) {
// the logic of counting the number of samples is copied from ParseManifestFile()
@ -412,6 +413,7 @@ Status ManifestOp::GetClassIndexing(const std::string &file, const py::dict &dic
return Status::OK();
}
#endif
// Visitor accept method for NodePass
Status ManifestOp::Accept(NodePass *p, bool *modified) {

View File

@ -165,12 +165,14 @@ class ManifestOp : public ParallelOp, public RandomAccessOp {
// @param show_all
void Print(std::ostream &out, bool show_all) const override;
#ifdef ENABLE_PYTHON
static Status CountTotalRows(const std::string &file, const py::dict &dict, const std::string &usage, int64_t *count,
int64_t *numClasses);
// Get str-to-int mapping from label name to index
static Status GetClassIndexing(const std::string &file, const py::dict &dict, const std::string &usage,
std::map<std::string, int32_t> *output_class_indexing);
#endif
/// \brief Base-class override for NodePass visitor acceptor
/// \param[in] p Pointer to the NodePass to be accepted

View File

@ -419,6 +419,7 @@ Status VOCOp::ReadAnnotationToTensor(const std::string &path, TensorRow *row) {
return Status::OK();
}
#ifdef ENABLE_PYTHON
Status VOCOp::CountTotalRows(const std::string &dir, const std::string &task_type, const std::string &task_mode,
const py::dict &dict, int64_t *count) {
if (task_type == "Detection") {
@ -467,6 +468,8 @@ Status VOCOp::GetClassIndexing(const std::string &dir, const std::string &task_t
return Status::OK();
}
#endif
// Visitor accept method for NodePass
Status VOCOp::Accept(NodePass *p, bool *modified) {
// Downcast shared pointer then call visitor

View File

@ -188,6 +188,7 @@ class VOCOp : public ParallelOp, public RandomAccessOp {
// @param show_all
void Print(std::ostream &out, bool show_all) const override;
#ifdef ENABLE_PYTHON
// @param const std::string &dir - VOC dir path
// @param const std::string &task_type - task type of reading voc job
// @param const std::string &task_mode - task mode of reading voc job
@ -204,6 +205,7 @@ class VOCOp : public ParallelOp, public RandomAccessOp {
// @param std::map<std::string, int32_t> *output_class_indexing - output class index of VOCDataset
static Status GetClassIndexing(const std::string &dir, const std::string &task_type, const std::string &task_mode,
const py::dict &dict, std::map<std::string, int32_t> *output_class_indexing);
#endif
/// \brief Base-class override for NodePass visitor acceptor
/// \param[in] p Pointer to the NodePass to be accepted

View File

@ -21,8 +21,10 @@
#include "minddata/dataset/util/task_manager.h"
#include "minddata/dataset/engine/opt/pass.h"
#include "minddata/dataset/engine/opt/pre/removal_pass.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/opt/pre/cache_transform_pass.h"
#include "minddata/dataset/engine/opt/post/repeat_pass.h"
#endif
#include "minddata/dataset/engine/opt/pre/epoch_injection_pass.h"
#include "mindspore/ccsrc/minddata/dataset/engine/opt/optional/tensor_op_fusion_pass.h"
#include "minddata/dataset/engine/perf/profiling.h"
@ -227,7 +229,9 @@ Status ExecutionTree::PrepareTreePreAction() {
MS_LOG(INFO) << "Running pre pass loops.";
pre_actions.push_back(std::make_unique<EpochInjectionPass>());
pre_actions.push_back(std::make_unique<RemovalPass>());
#ifndef ENABLE_ANDROID
pre_actions.push_back(std::make_unique<CacheTransformPass>());
#endif
// Apply pre action passes
for (auto &pass : pre_actions) {
RETURN_IF_NOT_OK(pass->Run(this, &modified));
@ -244,7 +248,9 @@ Status ExecutionTree::PrepareTreePostAction() {
std::vector<std::unique_ptr<Pass>> post_actions;
// Construct pre actions
MS_LOG(INFO) << "Running post pass loops.";
#ifndef ENABLE_ANDROID
post_actions.push_back(std::make_unique<RepeatPass>());
#endif
// Apply post action passes
for (auto &pass : post_actions) {

View File

@ -17,10 +17,12 @@
#include "minddata/dataset/engine/opt/pass.h"
#include "minddata/dataset/engine/datasetops/batch_op.h"
#include "minddata/dataset/engine/datasetops/build_vocab_op.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/datasetops/build_sentence_piece_vocab_op.h"
#include "minddata/dataset/engine/datasetops/cache_op.h"
#include "minddata/dataset/engine/datasetops/cache_merge_op.h"
#include "minddata/dataset/engine/datasetops/cache_lookup_op.h"
#endif
#include "minddata/dataset/engine/datasetops/dataset_op.h"
#include "minddata/dataset/engine/datasetops/device_queue_op.h"
#include "minddata/dataset/engine/datasetops/epoch_ctrl_op.h"
@ -205,10 +207,12 @@ Status NodePass::RunOnNode(std::shared_ptr<AlbumOp> node, bool *modified) {
return RunOnNode(std::static_pointer_cast<DatasetOp>(node), modified);
}
#ifndef ENABLE_ANDROID
Status NodePass::RunOnNode(std::shared_ptr<CacheOp> node, bool *modified) {
// Fallback to base class visitor by default
return RunOnNode(std::static_pointer_cast<DatasetOp>(node), modified);
}
#endif
Status NodePass::RunOnNode(std::shared_ptr<MnistOp> node, bool *modified) {
// Fallback to base class visitor by default
@ -235,6 +239,7 @@ Status NodePass::RunOnNode(std::shared_ptr<RepeatOp> node, bool *modified) {
return RunOnNode(std::static_pointer_cast<DatasetOp>(node), modified);
}
#ifndef ENABLE_ANDROID
Status NodePass::RunOnNode(std::shared_ptr<CacheMergeOp> node, bool *modified) {
// Fallback to base class visitor by default
return RunOnNode(std::static_pointer_cast<DatasetOp>(node), modified);
@ -244,6 +249,7 @@ Status NodePass::RunOnNode(std::shared_ptr<CacheLookupOp> node, bool *modified)
// Fallback to base class visitor by default
return RunOnNode(std::static_pointer_cast<DatasetOp>(node), modified);
}
#endif
Status NodePass::RunOnNode(std::shared_ptr<EpochCtrlOp> node, bool *modified) {
// Fallback to base class visitor by default
@ -255,6 +261,7 @@ Status NodePass::PreRunOnNode(std::shared_ptr<RepeatOp> node, bool *modified) {
return PreRunOnNode(std::static_pointer_cast<DatasetOp>(node), modified);
}
#ifndef ENABLE_ANDROID
Status NodePass::PreRunOnNode(std::shared_ptr<CacheOp> node, bool *modified) {
// Fallback to base class visitor by default
return PreRunOnNode(std::static_pointer_cast<DatasetOp>(node), modified);
@ -264,6 +271,7 @@ Status NodePass::PreRunOnNode(std::shared_ptr<CacheMergeOp> node, bool *modified
// Fallback to base class visitor by default
return PreRunOnNode(std::static_pointer_cast<DatasetOp>(node), modified);
}
#endif
Status NodePass::PreRunOnNode(std::shared_ptr<EpochCtrlOp> node, bool *modified) {
// Fallback to base class visitor by default
@ -275,9 +283,12 @@ Status NodePass::PreRunOnNode(std::shared_ptr<BuildVocabOp> node, bool *modified
return PreRunOnNode(std::static_pointer_cast<DatasetOp>(node), modified);
}
#ifndef ENABLE_ANDROID
Status NodePass::PreRunOnNode(std::shared_ptr<BuildSentencePieceVocabOp> node, bool *modified) {
// Fallback to base class visitor by default
return PreRunOnNode(std::static_pointer_cast<DatasetOp>(node), modified);
}
#endif
} // namespace dataset
} // namespace mindspore

View File

@ -63,7 +63,9 @@ class DeviceQueueOp;
class ImageFolderOp;
#ifndef ENABLE_ANDROID
class CacheOp;
#endif
class MnistOp;
@ -77,15 +79,19 @@ class CocoOp;
class CelebAOp;
#ifndef ENABLE_ANDROID
class CacheMergeOp;
class CacheLookupOp;
#endif
class EpochCtrlOp;
class BuildVocabOp;
#ifndef ENABLE_ANDROID
class BuildSentencePieceVocabOp;
#endif
// The base class Pass is the basic unit of tree transformation.
// The actual implementation of the passes will be derived from here.
@ -188,7 +194,9 @@ class NodePass : public Pass {
virtual Status RunOnNode(std::shared_ptr<DeviceQueueOp> node, bool *modified);
#ifndef ENABLE_ANDROID
virtual Status RunOnNode(std::shared_ptr<CacheOp> node, bool *modified);
#endif
virtual Status RunOnNode(std::shared_ptr<ImageFolderOp> node, bool *modified);
@ -202,23 +210,31 @@ class NodePass : public Pass {
virtual Status RunOnNode(std::shared_ptr<RepeatOp> node, bool *modified);
#ifndef ENABLE_ANDROID
virtual Status RunOnNode(std::shared_ptr<CacheMergeOp> node, bool *modified);
virtual Status RunOnNode(std::shared_ptr<CacheLookupOp> node, bool *modified);
#endif
virtual Status RunOnNode(std::shared_ptr<EpochCtrlOp> node, bool *modified);
#ifndef ENABLE_ANDROID
virtual Status PreRunOnNode(std::shared_ptr<CacheOp> node, bool *modified);
#endif
virtual Status PreRunOnNode(std::shared_ptr<RepeatOp> node, bool *modified);
#ifndef ENABLE_ANDROID
virtual Status PreRunOnNode(std::shared_ptr<CacheMergeOp> node, bool *modified);
#endif
virtual Status PreRunOnNode(std::shared_ptr<EpochCtrlOp> node, bool *modified);
virtual Status PreRunOnNode(std::shared_ptr<BuildVocabOp> node, bool *modified);
#ifndef ENABLE_ANDROID
virtual Status PreRunOnNode(std::shared_ptr<BuildSentencePieceVocabOp> node, bool *modified);
#endif
private:
// Helper function to perform DFS visit

View File

@ -33,6 +33,7 @@ Status EpochInjectionPass::InjectionFinder::PreRunOnNode(std::shared_ptr<BuildVo
return Status::OK();
}
#ifndef ENABLE_ANDROID
// Performs finder work for BuildSentencePieceVocabOp that has special rules about epoch control injection
Status EpochInjectionPass::InjectionFinder::PreRunOnNode(std::shared_ptr<BuildSentencePieceVocabOp> node,
bool *modified) {
@ -46,6 +47,7 @@ Status EpochInjectionPass::InjectionFinder::PreRunOnNode(std::shared_ptr<CacheOp
injection_point_ = nullptr;
return Status::OK();
}
#endif
Status EpochInjectionPass::InjectionFinder::RunOnNode(std::shared_ptr<DeviceQueueOp> node, bool *modified) {
// Assumption: There is only one DeviceQueueOp in a pipeline. This assumption is not validated here.

View File

@ -48,6 +48,7 @@ class EpochInjectionPass : public TreePass {
/// \return Status The error code return
Status PreRunOnNode(std::shared_ptr<BuildVocabOp> node, bool *modified) override;
#ifndef ENABLE_ANDROID
/// \brief Performs finder work for BuildSentencePieceVocabOp that has special rules about epoch control injection.
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
@ -60,6 +61,7 @@ class EpochInjectionPass : public TreePass {
/// \param[inout] modified Indicator if the node was changed at all
/// \return Status The error code return
Status PreRunOnNode(std::shared_ptr<CacheOp> node, bool *modified) override;
#endif
/// \brief Register the DeviceQueueOp for further action.
/// \param[in] node The node being visited

View File

@ -25,6 +25,7 @@ namespace dataset {
RemovalPass::RemovalNodes::RemovalNodes() : is_caching_(false) {}
#ifndef ENABLE_ANDROID
// Identifies the subtree below this node as a cached descendant tree.
Status RemovalPass::RemovalNodes::PreRunOnNode(std::shared_ptr<CacheOp> node, bool *modified) {
*modified = false;
@ -40,6 +41,7 @@ Status RemovalPass::RemovalNodes::RunOnNode(std::shared_ptr<CacheOp> node, bool
is_caching_ = false;
return Status::OK();
}
#endif
// Perform ShuffleOp removal check.
Status RemovalPass::RemovalNodes::RunOnNode(std::shared_ptr<ShuffleOp> node, bool *modified) {

View File

@ -42,6 +42,7 @@ class RemovalPass : public TreePass {
/// \brief Destructor
~RemovalNodes() = default;
#ifndef ENABLE_ANDROID
/// \brief Identifies the subtree below this node as a cached descendant tree.
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
@ -53,6 +54,7 @@ class RemovalPass : public TreePass {
/// \param[inout] modified Indicator if the node was changed at all
/// \return Status The error code return
Status RunOnNode(std::shared_ptr<CacheOp> node, bool *modified) override;
#endif
/// \brief Perform ShuffleOp removal check
/// \param[in] node The node being visited

View File

@ -30,7 +30,9 @@
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/include/samplers.h"
#include "minddata/dataset/include/type_id.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/text/vocab.h"
#endif
namespace mindspore {
namespace dataset {
@ -40,7 +42,9 @@ class DatasetOp;
class DataSchema;
class Tensor;
class TensorShape;
#ifndef ENABLE_ANDROID
class Vocab;
#endif
namespace api {
@ -57,14 +61,20 @@ class CocoDataset;
class CSVDataset;
class CsvBase;
class ImageFolderDataset;
#ifndef ENABLE_ANDROID
class ManifestDataset;
#endif
class MnistDataset;
class RandomDataset;
class TextFileDataset;
#ifndef ENABLE_ANDROID
class VOCDataset;
#endif
// Dataset Op classes (in alphabetical order)
class BatchDataset;
#ifndef ENABLE_ANDROID
class BuildVocabDataset;
#endif
class ConcatDataset;
class MapDataset;
class ProjectDataset;
@ -210,6 +220,7 @@ std::shared_ptr<ImageFolderDataset> ImageFolder(const std::string &dataset_dir,
const std::set<std::string> &extensions = {},
const std::map<std::string, int32_t> &class_indexing = {});
#ifndef ENABLE_ANDROID
/// \brief Function to create a ManifestDataset
/// \notes The generated dataset has two columns ['image', 'label']
/// \param[in] dataset_file The dataset file to be read
@ -224,6 +235,7 @@ std::shared_ptr<ManifestDataset> Manifest(const std::string &dataset_file, const
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
const std::map<std::string, int32_t> &class_indexing = {},
bool decode = false);
#endif
/// \brief Function to create a MnistDataset
/// \notes The generated dataset has two columns ['image', 'label']
@ -276,6 +288,7 @@ std::shared_ptr<TextFileDataset> TextFile(const std::vector<std::string> &datase
ShuffleMode shuffle = ShuffleMode::kGlobal, int32_t num_shards = 1,
int32_t shard_id = 0);
#ifndef ENABLE_ANDROID
/// \brief Function to create a VOCDataset
/// \notes The generated dataset has multi-columns :
/// - task='Detection', column: [['image', dtype=uint8], ['bbox', dtype=float32], ['label', dtype=uint32],
@ -293,6 +306,7 @@ std::shared_ptr<VOCDataset> VOC(const std::string &dataset_dir, const std::strin
const std::string &mode = "train",
const std::map<std::string, int32_t> &class_indexing = {}, bool decode = false,
const std::shared_ptr<SamplerObj> &sampler = RandomSampler());
#endif
/// \brief Function to create a ZipDataset
/// \notes Applies zip to the dataset
@ -356,6 +370,7 @@ class Dataset : public std::enable_shared_from_this<Dataset> {
/// \return Shared pointer to the current BatchDataset
std::shared_ptr<BatchDataset> Batch(int32_t batch_size, bool drop_remainder = false);
#ifndef ENABLE_ANDROID
/// \brief Function to create a Vocab from source dataset
/// \notes Build a vocab from a dataset. This would collect all the unique words in a dataset and return a vocab
/// which contains top_k most frequent words (if top_k is specified)
@ -373,6 +388,7 @@ class Dataset : public std::enable_shared_from_this<Dataset> {
const std::pair<int64_t, int64_t> &freq_range = {0, kDeMaxFreq},
int64_t top_k = kDeMaxTopk, const std::vector<std::string> &special_tokens = {},
bool special_first = true);
#endif
/// \brief Function to create a ConcatDataset
/// \notes Concat the datasets in the input
@ -753,6 +769,7 @@ class ImageFolderDataset : public Dataset {
std::set<std::string> exts_;
};
#ifndef ENABLE_ANDROID
class ManifestDataset : public Dataset {
public:
/// \brief Constructor
@ -777,6 +794,7 @@ class ManifestDataset : public Dataset {
std::map<std::string, int32_t> class_index_;
std::shared_ptr<SamplerObj> sampler_;
};
#endif
class MnistDataset : public Dataset {
public:
@ -873,6 +891,7 @@ class TextFileDataset : public Dataset {
ShuffleMode shuffle_;
};
#ifndef ENABLE_ANDROID
class VOCDataset : public Dataset {
public:
/// \brief Constructor
@ -904,6 +923,7 @@ class VOCDataset : public Dataset {
bool decode_;
std::shared_ptr<SamplerObj> sampler_;
};
#endif
// DERIVED DATASET CLASSES FOR DATASET OPS
// (In alphabetical order)
@ -933,6 +953,7 @@ class BatchDataset : public Dataset {
std::map<std::string, std::pair<TensorShape, std::shared_ptr<Tensor>>> pad_map_;
};
#ifndef ENABLE_ANDROID
class BuildVocabDataset : public Dataset {
public:
/// \brief Constructor
@ -959,6 +980,7 @@ class BuildVocabDataset : public Dataset {
std::vector<std::string> special_tokens_;
bool special_first_;
};
#endif
class ConcatDataset : public Dataset {
public:

View File

@ -187,7 +187,7 @@ void JpegSetSource(j_decompress_ptr cinfo, const void *data, int64_t datasize) {
(*cinfo->mem->alloc_small)(reinterpret_cast<j_common_ptr>(cinfo), JPOOL_PERMANENT, sizeof(struct jpeg_source_mgr)));
cinfo->src->init_source = JpegInitSource;
cinfo->src->fill_input_buffer = JpegFillInputBuffer;
#if defined(_WIN32) || defined(_WIN64)
#if defined(_WIN32) || defined(_WIN64) || defined(ENABLE_ARM32)
cinfo->src->skip_input_data = reinterpret_cast<void (*)(j_decompress_ptr, long)>(JpegSkipInputData);
#else
cinfo->src->skip_input_data = JpegSkipInputData;

View File

@ -31,6 +31,7 @@ WordIdType Vocab::Lookup(const WordType &word) const {
return itr == word2id_.end() ? kNoTokenExists : itr->second;
}
#ifdef ENABLE_PYTHON
Status Vocab::BuildFromPyList(const py::list &words, const py::list &special_tokens, bool prepend_special,
std::shared_ptr<Vocab> *vocab) {
// check of duplication on both words and special_tokens will be performed in python
@ -61,6 +62,7 @@ Status Vocab::BuildFromPyDict(const py::dict &words, std::shared_ptr<Vocab> *voc
*vocab = std::make_shared<Vocab>(std::move(word2id));
return Status::OK();
}
#endif
void Vocab::append_word(const std::string &word) {
if (word2id_.find(word) == word2id_.end()) {

View File

@ -23,18 +23,23 @@
#include <vector>
#include "minddata/dataset/util/status.h"
#ifdef ENABLE_PYTHON
#include "pybind11/pybind11.h"
#include "pybind11/stl.h"
#endif
namespace mindspore {
namespace dataset {
#ifdef ENABLE_PYTHON
namespace py = pybind11;
#endif
using WordIdType = int32_t;
using WordType = std::string;
class Vocab {
public:
#ifdef ENABLE_PYTHON
// Build a vocab from a python dictionary; key is each word, id needs to start from 2, with no duplicates and continuous
// @param const py::dict &words - a dictionary containing word, word id pair.
// @param std::shared_ptr<Vocab> *vocab - return value, vocab object
@ -56,6 +61,7 @@ class Vocab {
// @return error code
static Status BuildFromFile(const std::string &path, const std::string &delimiter, int32_t vocab_size,
const py::list &special_tokens, bool prepend_special, std::shared_ptr<Vocab> *vocab);
#endif
/// \brief Build a vocab from a c++ map. id needs to start from 2, no duplicate and continuous
/// \param[in] words An unordered_map containing word, word id pair.

View File

@ -16,7 +16,7 @@
#include "minddata/dataset/util/services.h"
#include <limits.h>
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
#include <sys/syscall.h>
#else
#include <stdlib.h>
@ -31,7 +31,7 @@ namespace dataset {
std::unique_ptr<Services> Services::instance_ = nullptr;
std::once_flag Services::init_instance_flag_;
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
std::string Services::GetUserName() {
char user[LOGIN_NAME_MAX];
(void)getlogin_r(user, sizeof(user));

View File

@ -63,7 +63,7 @@ class Services {
std::shared_ptr<MemoryPool> GetServiceMemPool() { return pool_; }
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
static std::string GetUserName();
static std::string GetHostName();

View File

@ -16,7 +16,7 @@
#include "minddata/dataset/util/sig_handler.h"
#include <signal.h>
#include <sys/types.h>
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
#include <ucontext.h>
#endif
#include <unistd.h>
@ -25,7 +25,7 @@
namespace mindspore {
namespace dataset {
// Register the custom signal handlers
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
void RegisterHandlers() {
struct sigaction new_int_action;

View File

@ -22,7 +22,7 @@
namespace mindspore {
namespace dataset {
// Register the custom signal handlers
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
extern void RegisterHandlers();
// A signal handler for SIGINT. Drives interrupt to watchdog

View File

@ -17,6 +17,9 @@
#include "utils/ms_utils.h"
#include "minddata/dataset/util/task_manager.h"
#include "utils/log_adapter.h"
#if defined(__ANDROID__) || defined(ANDROID)
#include "minddata/dataset/util/services.h"
#endif
namespace mindspore {
namespace dataset {
@ -29,6 +32,10 @@ void Task::operator()() {
id_ = this_thread::get_id();
std::stringstream ss;
ss << id_;
#if defined(__ANDROID__) || defined(ANDROID)
// The thread id in Linux may be duplicated
ss << Services::GetUniqueID();
#endif
MS_LOG(DEBUG) << my_name_ << " Thread ID " << ss.str() << " Started.";
try {
// Previously there is a timing hole where the thread is spawn but hit error immediately before we can set

View File

@ -124,6 +124,7 @@ TaskManager::TaskManager() try : global_interrupt_(0),
master_->is_master_ = true;
#if !defined(_WIN32) && !defined(_WIN64)
gMyTask = master_.get();
#if !defined(__ANDROID__) && !defined(ANDROID)
// Initialize the semaphore for the watchdog
errno_t rc = sem_init(&sem_, 0, 0);
if (rc == -1) {
@ -131,6 +132,7 @@ TaskManager::TaskManager() try : global_interrupt_(0),
std::terminate();
}
#endif
#endif
} catch (const std::exception &e) {
MS_LOG(ERROR) << "MindData initialization failed: " << e.what() << ".";
std::terminate();
@ -145,14 +147,14 @@ TaskManager::~TaskManager() {
watchdog_grp_ = nullptr;
watchdog_ = nullptr;
}
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
(void)sem_destroy(&sem_);
#endif
}
Status TaskManager::DoServiceStart() {
MS_LOG(INFO) << "Starting Task Manager.";
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
// Create a watchdog for control-c
std::shared_ptr<MemoryPool> mp = Services::GetInstance().GetServiceMemPool();
// A dummy group just for the watchdog. We aren't really using it. But most code assumes a thread must
@ -181,7 +183,7 @@ Status TaskManager::DoServiceStop() {
Status TaskManager::WatchDog() {
TaskManager::FindMe()->Post();
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
errno_t err = sem_wait(&sem_);
if (err == -1) {
RETURN_STATUS_UNEXPECTED("Errno = " + std::to_string(errno));

View File

@ -16,7 +16,7 @@
#ifndef MINDSPORE_CCSRC_MINDDATA_DATASET_UTIL_TASK_MANAGER_H_
#define MINDSPORE_CCSRC_MINDDATA_DATASET_UTIL_TASK_MANAGER_H_
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
#include <semaphore.h>
#include <signal.h> // for sig_atomic_t
#endif
@ -92,7 +92,7 @@ class TaskManager : public Service {
static void InterruptMaster(const Status &rc = Status::OK());
static void WakeUpWatchDog() {
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
TaskManager &tm = TaskManager::GetInstance();
(void)sem_post(&tm.sem_);
#endif
@ -113,7 +113,7 @@ class TaskManager : public Service {
std::shared_ptr<Task> master_;
List<Task> lru_;
List<Task> free_lst_;
#if !defined(_WIN32) && !defined(_WIN64)
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID)
sem_t sem_;
#endif
TaskGroup *watchdog_grp_;

View File

@ -78,7 +78,7 @@ class Platform {
#if defined(SYSTEM_ENV_POSIX)
platform_ = kPlatformPosix;
#elif defined(SYSTEM_ENV_POSIX_ANDROID)
platform_ = kPlatformPosixAndroid
platform_ = kPlatformPosixAndroid;
#elif defined(SYSTEM_ENV_WINDOWS)
platform_ = kPlatformWindows;
#endif

View File

@ -62,7 +62,8 @@ option(BUILD_CONVERTER "if build converter" on)
option(ENABLE_FP16 "if build fp16 ops" off)
option(SUPPORT_GPU "if support gpu" off)
option(OFFLINE_COMPILE "if offline compile OpenCL kernel" off)
option(BUILD_MINDDATA "" off)
set(BUILD_MINDDATA "off" CACHE STRING "off, lite, or full")
option(BUILD_MINDDATA_EXAMPLE "" on)
set(CMAKE_VERBOSE_MAKEFILE on)
add_compile_definitions(USE_ANDROID_LOG)
@ -172,7 +173,9 @@ if (BUILD_DEVICE)
endif ()
endif ()
if (BUILD_MINDDATA)
if (BUILD_MINDDATA STREQUAL "lite" OR BUILD_MINDDATA STREQUAL "full")
# TODO: add sentencepiece dependency
#include(${TOP_DIR}/cmake/external_libs/sentencepiece.cmake)
# opencv
set(OpenCV_DIR ${TOP_DIR}/third_party/opencv/build)
find_package(OpenCV REQUIRED)
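
With BUILD_MINDDATA now a string cache variable instead of a boolean option, the value can be passed straight to CMake when configuring mindspore/lite by hand (build.sh normally does this for you). A minimal sketch, assuming an arm64 NDK toolchain; the paths and the PLATFORM_ARM64 flag are placeholders taken from the surrounding build scripts rather than from this diff:

    # Hypothetical manual configuration of the lite build tree
    cmake ../mindspore/lite \
          -DBUILD_MINDDATA=full \
          -DPLATFORM_ARM64=on \
          -DCMAKE_TOOLCHAIN_FILE=$ANDROID_NDK/build/cmake/android.toolchain.cmake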

View File

@ -1,27 +1,190 @@
set(MINDDATA_DIR ${CCSRC_DIR}/minddata/dataset)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC -Wall -Wno-deprecated-declarations")
set(CMAKE_CXX_FLAGS_DEBUG "$ENV{CXXFLAGS} -O0 -g2 -ggdb")
set(CMAKE_CXX_FLAGS_DEBUG "$ENV{CXXFLAGS} -O0 -g2 -ggdb -fno-inline-functions -fno-omit-frame-pointer -D_LIBCPP_INLINE_VISIBILITY='' -D_LIBCPP_DISABLE_EXTERN_TEMPLATE=1 -DHALF_ENABLE_CPP11_USER_LITERALS=0 -D_FORTIFY_SOURCE=2 -Wno-cpp")
set(CMAKE_CXX_FLAGS "$ENV{CXXFLAGS} -I/usr/local/include -std=c++17 -Wall -fPIC")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OPTION_CXX_FLAGS}")
if (PLATFORM_ARM64 OR PLATFORM_ARM32)
set(CMAKE_CXX_FLAGS_RELEASE "$ENV{CXXFLAGS} -O2 -Werror -Wno-return-std-move -Wno-unused-private-field -Wno-unused-lambda-capture -Wno-sign-compare -Wno-overloaded-virtual -Wno-unneeded-internal-declaration -Wno-unused-variable -Wno-pessimizing-move -Wno-inconsistent-missing-override -DHALF_ENABLE_CPP11_USER_LITERALS=0 -D_FORTIFY_SOURCE=2")
else()
set(CMAKE_CXX_FLAGS_RELEASE "$ENV{CXXFLAGS} -O2 -Wl,--allow-shlib-undefined -DHALF_ENABLE_CPP11_USER_LITERALS=0 -D_FORTIFY_SOURCE=2")
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-non-virtual-dtor")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-sign-compare")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-private-field")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-reorder")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-switch")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-sequence-point")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-variable")
if (PLATFORM_ARM64 OR PLATFORM_ARM32)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-delete-non-virtual-dtor")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-uninitialized")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-delete-non-abstract-non-virtual-dtor")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-maybe-uninitialized")
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-format")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-attributes")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=default")
set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS} -s")
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/api MINDDATA_API_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/callback MINDDATA_CALLBACK_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/core MINDDATA_CORE_SRC_FILES)
list(REMOVE_ITEM MINDDATA_CORE_SRC_FILES "${MINDDATA_DIR}/core/client.cc")
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine MINDDATA_ENGINE_SRC_FILES)
# TODO: add cache
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/cache MINDDATA_ENGINE_CACHE_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/datasetops MINDDATA_ENGINE_DATASETOPS_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/datasetops/map_op MINDDATA_ENGINE_DATASETOPS_MAPOP_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/datasetops/source MINDDATA_ENGINE_DATASETOPS_SOURCE_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/datasetops/source/sampler MINDDATA_ENGINE_DATASETOPS_SOURCE_SAMPLER_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/opt MINDDATA_ENGINE_OPT_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/opt/optional MINDDATA_ENGINE_OPT_OPTIONAL_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/opt/post MINDDATA_ENGINE_OPT_POST_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/opt/pre MINDDATA_ENGINE_OPT_PRE_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/opt/util MINDDATA_ENGINE_OPT_UTIL_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/engine/perf MINDDATA_ENGINE_PERF_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/kernels MINDDATA_KERNELS_SRC_FILES)
list(REMOVE_ITEM MINDDATA_KERNELS_SRC_FILES "${MINDDATA_DIR}/kernels/py_func_op.cc")
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/kernels/image MINDDATA_KERNELS_IMAGE_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/kernels/data MINDDATA_KERNELS_DATA_SRC_FILES)
add_library(minddata-eager OBJECT
# TODO: add text src
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/text MINDDATA_TEXT_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/text/kernels MINDDATA_TEXT_KERNELS_SRC_FILES)
AUX_SOURCE_DIRECTORY(${MINDDATA_DIR}/util MINDDATA_UTIL_SRC_FILES)
if (BUILD_MINDDATA STREQUAL "full")
list(REMOVE_ITEM MINDDATA_API_SRC_FILES
"${MINDDATA_DIR}/api/text.cc"
"${MINDDATA_DIR}/api/de_tensor.cc"
"${MINDDATA_DIR}/api/execute.cc"
)
list(REMOVE_ITEM MINDDATA_CALLBACK_SRC_FILES
"${MINDDATA_DIR}/callback/py_ds_callback.cc"
)
list(REMOVE_ITEM MINDDATA_KERNELS_SRC_FILES "${MINDDATA_DIR}/kernels/py_func_op.cc")
list(REMOVE_ITEM MINDDATA_ENGINE_DATASETOPS_SRC_FILES
"${MINDDATA_DIR}/engine/datasetops/build_sentence_piece_vocab_op.cc"
"${MINDDATA_DIR}/engine/datasetops/filter_op.cc"
"${MINDDATA_DIR}/engine/datasetops/barrier_op.cc"
"${MINDDATA_DIR}/engine/datasetops/bucket_batch_by_length_op.cc"
"${MINDDATA_DIR}/engine/datasetops/build_vocab_op.cc"
"${MINDDATA_DIR}/engine/datasetops/cache_merge_op.cc"
"${MINDDATA_DIR}/engine/datasetops/cache_base_op.cc"
"${MINDDATA_DIR}/engine/datasetops/cache_lookup_op.cc"
"${MINDDATA_DIR}/engine/datasetops/cache_op.cc"
)
list(REMOVE_ITEM MINDDATA_ENGINE_DATASETOPS_SOURCE_SRC_FILES
"${MINDDATA_DIR}/engine/datasetops/source/generator_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/voc_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/manifest_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/mindrecord_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/tf_reader_op.cc"
)
list(REMOVE_ITEM MINDDATA_ENGINE_DATASETOPS_SOURCE_SAMPLER_SRC_FILES
"${MINDDATA_DIR}/engine/datasetops/source/sampler/python_sampler.cc"
)
list(REMOVE_ITEM MINDDATA_ENGINE_OPT_POST_SRC_FILES
"${MINDDATA_DIR}/engine/opt/post/repeat_pass.cc"
)
list(REMOVE_ITEM MINDDATA_ENGINE_OPT_PRE_SRC_FILES
"${MINDDATA_DIR}/engine/opt/pre/cache_transform_pass.cc"
)
include_directories("${CMAKE_BINARY_DIR}/minddata/dataset/engine/cache")
if (BUILD_MINDDATA_EXAMPLE AND (PLATFORM_ARM32 OR PLATFORM_ARM64))
set(MINDDATA_EXAMPLE_SRC ${CMAKE_CURRENT_SOURCE_DIR}/example/jni-example.cc)
endif()
add_library(minddata-lite SHARED
${MINDDATA_API_SRC_FILES}
${MINDDATA_CALLBACK_SRC_FILES}
${MINDDATA_CORE_SRC_FILES}
${MINDDATA_ENGINE_SRC_FILES}
#${MINDDATA_ENGINE_CACHE_SRC_FILES}
${MINDDATA_ENGINE_DATASETOPS_SRC_FILES}
${MINDDATA_ENGINE_DATASETOPS_MAPOP_SRC_FILES}
${MINDDATA_ENGINE_DATASETOPS_SOURCE_SRC_FILES}
${MINDDATA_ENGINE_DATASETOPS_SOURCE_SAMPLER_SRC_FILES}
${MINDDATA_ENGINE_OPT_SRC_FILES}
${MINDDATA_ENGINE_OPT_OPTIONAL_SRC_FILES}
${MINDDATA_ENGINE_OPT_POST_SRC_FILES}
${MINDDATA_ENGINE_OPT_PRE_SRC_FILES}
${MINDDATA_ENGINE_OPT_UTIL_SRC_FILES}
${MINDDATA_ENGINE_PERF_SRC_FILES}
${MINDDATA_KERNELS_SRC_FILES}
${MINDDATA_KERNELS_IMAGE_SRC_FILES}
${MINDDATA_KERNELS_DATA_SRC_FILES}
${MINDDATA_UTIL_SRC_FILES}
${MINDDATA_EXAMPLE_SRC}
${CMAKE_CURRENT_SOURCE_DIR}/../src/common/log_adapter.cc
${CORE_DIR}/utils/ms_utils.cc
${CORE_DIR}/gvar/logging_level.cc
${CCSRC_DIR}/utils/system/crc32c.cc
)
target_link_libraries(minddata-lite
securec
jpeg-turbo
jpeg
opencv_core
opencv_imgcodecs
opencv_imgproc
mindspore::json
)
# ref: https://github.com/android/ndk/issues/1202
if (PLATFORM_ARM32)
file(GLOB_RECURSE LIBCLANG_RT_LIB $ENV{ANDROID_NDK}/libclang_rt.builtins-arm-android.a)
if (LIBCLANG_RT_LIB STREQUAL "")
MESSAGE(FATAL_ERROR "Cannot find libclang_rt.builtins-arm-android.a in $ENV{ANDROID_NDK}")
endif()
target_link_libraries(minddata-lite ${LIBCLANG_RT_LIB})
endif()
if (PLATFORM_ARM32 OR PLATFORM_ARM64)
target_link_libraries(minddata-lite log)
elseif (BUILD_MINDDATA_EXAMPLE)
add_executable(mdlite-example ${CMAKE_CURRENT_SOURCE_DIR}/example/x86-example.cc)
target_link_libraries(mdlite-example minddata-lite)
add_custom_command(TARGET mdlite-example POST_BUILD
COMMAND cp -rf ${CMAKE_CURRENT_SOURCE_DIR}/example/testCifar10Data ${CMAKE_BINARY_DIR}/minddata
)
endif()
elseif (BUILD_MINDDATA STREQUAL "lite")
list(REMOVE_ITEM MINDDATA_CORE_SRC_FILES "${MINDDATA_DIR}/core/client.cc")
list(REMOVE_ITEM MINDDATA_KERNELS_SRC_FILES "${MINDDATA_DIR}/kernels/py_func_op.cc")
add_library(minddata-eager OBJECT
${MINDDATA_DIR}/api/de_tensor.cc
${MINDDATA_DIR}/api/execute.cc
)
add_library(minddata-lite SHARED
add_library(minddata-lite SHARED
${MINDDATA_CORE_SRC_FILES}
${MINDDATA_KERNELS_SRC_FILES}
${MINDDATA_KERNELS_IMAGE_SRC_FILES}
@ -34,7 +197,7 @@ add_library(minddata-lite SHARED
${CORE_DIR}/gvar/logging_level.cc
)
target_link_libraries(minddata-lite
target_link_libraries(minddata-lite
securec
jpeg-turbo
jpeg
@ -43,3 +206,13 @@ target_link_libraries(minddata-lite
opencv_imgproc
mindspore::json
)
# ref: https://github.com/android/ndk/issues/1202
if (PLATFORM_ARM32)
file(GLOB_RECURSE LIBCLANG_RT_LIB $ENV{ANDROID_NDK}/libclang_rt.builtins-arm-android.a)
if (LIBCLANG_RT_LIB STREQUAL "")
MESSAGE(FATAL_ERROR "Cannot find libclang_rt.builtins-arm-android.a in $ENV{ANDROID_NDK}")
endif()
target_link_libraries(minddata-lite ${LIBCLANG_RT_LIB})
endif()
endif()
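
For the x86 path of the full build, the CMakeLists above links mdlite-example against minddata-lite and copies the testCifar10Data folder next to it. Assuming the build directory used elsewhere in this change (mindspore/lite/build), the example could be run roughly as follows; the exact layout is inferred, not guaranteed:

    # Hypothetical run of the x86 example produced by -n full
    cd mindspore/lite/build/minddata
    export LD_LIBRARY_PATH=$PWD:$LD_LIBRARY_PATH    # pick up libminddata-lite.so
    ./mdlite-example                                # reads ./testCifar10Data/ copied by the post-build step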

View File

@ -0,0 +1,95 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <string>
#include <fstream>
#include <iostream>
#include <memory>
#include <unordered_map>
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/util/path.h"
#if defined(__ANDROID__) || defined(ANDROID)
#include <android/log.h>
#include <android/asset_manager.h>
#endif
extern "C" JNIEXPORT jstring JNICALL
Java_com_example_mindsporepredict_MainActivity_stringFromJNI(
JNIEnv* env,
jobject /* this */) {
std::string hello = "Hello World!";
MS_LOG(DEBUG) << hello;
return env->NewStringUTF(hello.c_str());
}
using Dataset = mindspore::dataset::api::Dataset;
using Iterator = mindspore::dataset::api::Iterator;
using mindspore::dataset::Tensor;
using mindspore::dataset::Path;
using mindspore::dataset::api::Cifar10;
using mindspore::dataset::api::RandomSampler;
extern "C" JNIEXPORT void JNICALL
Java_com_example_mindsporepredict_MainActivity_pathTest(
JNIEnv* env,
jobject /* this */,
jstring path) {
MS_LOG(WARNING) << env->GetStringUTFChars(path, 0);
Path f(env->GetStringUTFChars(path, 0));
MS_LOG(WARNING) << f.Exists() << f.IsDirectory() << f.ParentPath();
// Print out the first few items in the directory
auto dir_it = Path::DirIterator::OpenDirectory(&f);
MS_LOG(WARNING) << dir_it.get();
int i = 0;
while (dir_it->hasNext()) {
Path v = dir_it->next();
MS_LOG(WARNING) << v.toString();
i++;
if (i > 5)
break;
}
}
extern "C" JNIEXPORT void JNICALL
Java_com_example_mindsporepredict_MainActivity_TestCifar10Dataset(
JNIEnv* env,
jobject /* this */,
jstring path) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10Dataset.";
// Create a Cifar10 Dataset
std::string folder_path = env->GetStringUTFChars(path, 0);
std::shared_ptr<Dataset> ds = Cifar10(folder_path, RandomSampler(false, 10));
// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);
uint64_t i = 0;
while (row.size() != 0) {
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
}
// Manually terminate the pipeline
iter->Stop();
}

View File

@ -0,0 +1,55 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <string>
#include <fstream>
#include <iostream>
#include <memory>
#include <unordered_map>
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/util/path.h"
using Dataset = mindspore::dataset::api::Dataset;
using Iterator = mindspore::dataset::api::Iterator;
using mindspore::dataset::Tensor;
using mindspore::dataset::api::Cifar10;
using mindspore::dataset::api::RandomSampler;
int main() {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10Dataset.";
// Create a Cifar10 Dataset
std::string folder_path = "./testCifar10Data/";
std::shared_ptr<Dataset> ds = Cifar10(folder_path, RandomSampler(false, 10));
// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);
uint64_t i = 0;
while (row.size() != 0) {
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
}
// Manually terminate the pipeline
iter->Stop();
}

View File

@ -59,7 +59,7 @@ add_subdirectory(runtime/kernel/arm)
if (PLATFORM_ARM32 OR PLATFORM_ARM64)
target_link_libraries(mindspore-lite log)
endif ()
if (BUILD_MINDDATA)
if (BUILD_MINDDATA STREQUAL "lite")
target_link_libraries(mindspore-lite minddata-eager minddata-lite)
endif ()

View File

@ -163,13 +163,14 @@ if (SUPPORT_GPU)
)
endif()
### minddata lite
if (BUILD_MINDDATA)
if (BUILD_MINDDATA STREQUAL "lite")
include_directories(${CCSRC_DIR}/minddata)
set(DATASET_TEST_DIR ${TEST_DIR}/ut/src/dataset)
set(TEST_MINDDATA_SRC
${DATASET_TEST_DIR}/de_tensor_test.cc
${DATASET_TEST_DIR}/eager_test.cc
)
elseif (BUILD_MINDDATA STREQUAL "full")
endif()
### runtime framework
file(GLOB_RECURSE OPS_SRC ${LITE_DIR}/src/ops/*.cc)
@ -355,7 +356,7 @@ endif ()
add_executable(lite-test ${TEST_SRC})
target_link_libraries(lite-test dl ${GTEST_LIBRARY})
if (BUILD_MINDDATA)
if (BUILD_MINDDATA STREQUAL "lite")
target_link_libraries(lite-test
minddata-lite
minddata-eager
@ -363,6 +364,10 @@ if (BUILD_MINDDATA)
if (PLATFORM_ARM32 OR PLATFORM_ARM64)
target_link_libraries(lite-test log)
endif()
elseif(BUILD_MINDDATA STREQUAL "full")
if (PLATFORM_ARM32 OR PLATFORM_ARM64)
target_link_libraries(lite-test log)
endif()
endif()
if (BUILD_CONVERTER)
target_link_libraries(lite-test

View File

@ -11,8 +11,8 @@ function Run_x86() {
echo ${model_name} >> "${run_benchmark_log_file}"
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86} >> "${run_benchmark_log_file}"
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 >> "${run_benchmark_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 >> "${run_benchmark_log_file}"
if [ $? = 0 ]; then
run_result='x86: '${model_name}' pass'
echo ${run_result} >> ${run_benchmark_result_file}
@ -32,8 +32,8 @@ function Run_x86() {
echo ${model_name} >> "${run_benchmark_log_file}"
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86} >> "${run_benchmark_log_file}"
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 >> "${run_benchmark_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 >> "${run_benchmark_log_file}"
if [ $? = 0 ]; then
run_result='x86: '${model_name}' pass'
echo ${run_result} >> ${run_benchmark_result_file}
@ -53,8 +53,8 @@ function Run_x86() {
echo ${model_name} >> "${run_benchmark_log_file}"
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86} >> "${run_benchmark_log_file}"
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 >> "${run_benchmark_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 >> "${run_benchmark_log_file}"
if [ $? = 0 ]; then
run_result='x86: '${model_name}' pass'
echo ${run_result} >> ${run_benchmark_result_file}
@@ -74,8 +74,8 @@ function Run_x86() {
echo ${model_name} >> "${run_benchmark_log_file}"
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86} >> "${run_benchmark_log_file}"
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'_posttraining.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/quantTraining/mnist_calibration_data/00099.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'_posttraining.ms.out --warmUpLoopCount=1 --loopCount=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}_posttraining.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/quantTraining/mnist_calibration_data/00099.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}_posttraining.ms.out --warmUpLoopCount=1 --loopCount=1 >> "${run_benchmark_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'_posttraining.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/quantTraining/mnist_calibration_data/00099.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'_posttraining.ms.out --warmUpLoopCount=1 --loopCount=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}_posttraining.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/quantTraining/mnist_calibration_data/00099.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}_posttraining.ms.out --warmUpLoopCount=1 --loopCount=1 >> "${run_benchmark_log_file}"
if [ $? = 0 ]; then
run_result='x86: '${model_name}'_posttraining pass'
echo ${run_result} >> ${run_benchmark_result_file}
@@ -95,8 +95,8 @@ function Run_x86() {
echo ${model_name} >> "${run_benchmark_log_file}"
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86} >> "${run_benchmark_log_file}"
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1 --numThreads=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 --numThreads=1 >> "${run_benchmark_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1 --numThreads=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 --numThreads=1 >> "${run_benchmark_log_file}"
if [ $? = 0 ]; then
run_result='x86: '${model_name}'_awaretraining pass'
echo ${run_result} >> ${run_benchmark_result_file}
@@ -116,8 +116,8 @@ function Run_x86() {
echo ${model_name} >> "${run_benchmark_log_file}"
echo 'cd '${convertor_path}'/mindspore-lite-'${version}'-runtime-x86-'${process_unit_x86} >> "${run_benchmark_log_file}"
cd ${convertor_path}/mindspore-lite-${version}-runtime-x86-${process_unit_x86} || return 1
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 --accuracyThreshold=1.5 >> "${run_benchmark_log_file}"
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath='${ms_models_path}'/'${model_name}'.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/'${model_name}'.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/'${model_name}'.ms.out --warmUpLoopCount=1 --loopCount=1 --accuracyThreshold=1.5' >> "${run_benchmark_log_file}"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./lib:./third_party/libjpeg-turbo/lib:./third_party/opencv/lib;./benchmark/benchmark --modelPath=${ms_models_path}/${model_name}.ms --inDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/input/${model_name}.ms.bin --calibDataPath=/home/workspace/mindspore_dataset/mslite/models/hiai/input_output/output/${model_name}.ms.out --warmUpLoopCount=1 --loopCount=1 --accuracyThreshold=1.5 >> "${run_benchmark_log_file}"
if [ $? = 0 ]; then
run_result='x86: '${model_name}' pass'
echo ${run_result} >> ${run_benchmark_result_file}
@@ -498,6 +498,17 @@ rm -rf ${basepath}/benchmark_test
mkdir -p ${basepath}/benchmark_test
benchmark_test_path=${basepath}/benchmark_test
cd ${benchmark_test_path} || exit 1
# If built with minddata, copy the minddata-related libs
if [ -f ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/lib/libminddata-lite.so ]; then
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/third_party/libjpeg-turbo/lib/libjpeg.so ${benchmark_test_path}/libjpeg.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/third_party/libjpeg-turbo/lib/libturbojpeg.so ${benchmark_test_path}/libturbojpeg.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/third_party/opencv/lib/libopencv_core.so ${benchmark_test_path}/libopencv_core.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/third_party/opencv/lib/libopencv_imgcodecs.so ${benchmark_test_path}/libopencv_imgcodecs.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/third_party/opencv/lib/libopencv_imgproc.so ${benchmark_test_path}/libopencv_imgproc.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/lib/libminddata-lite.so ${benchmark_test_path}/libminddata-lite.so || exit 1
fi
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/lib/libmindspore-lite.so ${benchmark_test_path}/libmindspore-lite.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/lib/liboptimize.so ${benchmark_test_path}/liboptimize.so || exit 1
cp -a ${arm_path}/mindspore-lite-${version}-runtime-arm64-${process_unit_arm}/benchmark/benchmark ${benchmark_test_path}/benchmark || exit 1