Small MindData lite package for the training task

This commit is contained in:
xulei2020 2020-11-11 17:30:27 +08:00
parent f60ae8925d
commit 50efdcc2df
10 changed files with 246 additions and 137 deletions

View File

@ -598,6 +598,9 @@ build_opencv() {
}
build_jpeg_turbo() {
if [ -d "${BASEPATH}"/third_party/libjpeg-turbo/lib ];then
rm -rf "${BASEPATH}"/third_party/libjpeg-turbo/lib
fi
cd ${BASEPATH}
if [[ "${LITE_PLATFORM}" == "x86_64" ]]; then
JPEG_TURBO="${BASEPATH}"/third_party/libjpeg-turbo/lib/libjpeg.so.62.3.0

View File

@ -23,7 +23,6 @@
#include "minddata/dataset/include/transforms.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/ir/cache/dataset_cache_impl.h"
#endif
@ -32,22 +31,29 @@
// IR non-leaf nodes
#include "minddata/dataset/engine/ir/datasetops/batch_node.h"
#include "minddata/dataset/engine/ir/datasetops/concat_node.h"
#include "minddata/dataset/engine/ir/datasetops/filter_node.h"
#include "minddata/dataset/engine/ir/datasetops/map_node.h"
#include "minddata/dataset/engine/ir/datasetops/project_node.h"
#include "minddata/dataset/engine/ir/datasetops/rename_node.h"
#include "minddata/dataset/engine/ir/datasetops/repeat_node.h"
#include "minddata/dataset/engine/ir/datasetops/shuffle_node.h"
#include "minddata/dataset/engine/ir/datasetops/skip_node.h"
#include "minddata/dataset/engine/ir/datasetops/take_node.h"
#include "minddata/dataset/engine/ir/datasetops/transfer_node.h"
#include "minddata/dataset/engine/ir/datasetops/zip_node.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/ir/datasetops/bucket_batch_by_length_node.h"
#include "minddata/dataset/engine/ir/datasetops/build_sentence_piece_vocab_node.h"
#include "minddata/dataset/engine/ir/datasetops/build_vocab_node.h"
#include "minddata/dataset/engine/ir/datasetops/concat_node.h"
#include "minddata/dataset/engine/ir/datasetops/filter_node.h"
#endif
#include "minddata/dataset/engine/ir/datasetops/map_node.h"
#include "minddata/dataset/engine/ir/datasetops/project_node.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/ir/datasetops/rename_node.h"
#endif
#include "minddata/dataset/engine/ir/datasetops/repeat_node.h"
#include "minddata/dataset/engine/ir/datasetops/shuffle_node.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/ir/datasetops/skip_node.h"
#include "minddata/dataset/engine/ir/datasetops/take_node.h"
#include "minddata/dataset/engine/ir/datasetops/transfer_node.h"
#include "minddata/dataset/engine/ir/datasetops/zip_node.h"
#endif
#include "minddata/dataset/core/config_manager.h"
@ -57,6 +63,9 @@
// IR leaf nodes
#include "minddata/dataset/engine/ir/datasetops/source/album_node.h"
// IR leaf nodes disabled for android
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/ir/datasetops/source/celeba_node.h"
#include "minddata/dataset/engine/ir/datasetops/source/cifar100_node.h"
#include "minddata/dataset/engine/ir/datasetops/source/cifar10_node.h"
@ -67,9 +76,6 @@
#include "minddata/dataset/engine/ir/datasetops/source/mnist_node.h"
#include "minddata/dataset/engine/ir/datasetops/source/random_node.h"
#include "minddata/dataset/engine/ir/datasetops/source/text_file_node.h"
// IR leaf nodes disabled for android
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/ir/datasetops/source/manifest_node.h"
#include "minddata/dataset/engine/ir/datasetops/source/minddata_node.h"
#include "minddata/dataset/engine/ir/datasetops/source/tf_record_node.h"
@ -107,6 +113,7 @@ std::shared_ptr<Iterator> Dataset::CreateIterator(std::vector<std::string> colum
return iter;
}
#ifndef ENABLE_ANDROID
// Function to return a transferred Node that transfers data through a device.
bool Dataset::DeviceQueue(bool send_epoch_end) {
Status rc;
@ -142,7 +149,6 @@ bool Dataset::DeviceQueue(bool send_epoch_end) {
return true;
}
#ifndef ENABLE_ANDROID
// Function to create the saver, which will build and launch the execution tree and save data
bool Dataset::Save(std::string dataset_path, int32_t num_files, std::string dataset_type) {
Status rc;
@ -320,6 +326,7 @@ std::shared_ptr<AlbumDataset> Album(const std::string &dataset_dir, const std::s
return ds;
}
#ifndef ENABLE_ANDROID
// Function to create a CelebADataset.
std::shared_ptr<CelebADataset> CelebA(const std::string &dataset_dir, const std::string &usage,
const std::shared_ptr<SamplerObj> &sampler, bool decode,
@ -389,7 +396,6 @@ std::shared_ptr<ImageFolderDataset> ImageFolder(const std::string &dataset_dir,
return ds;
}
#ifndef ENABLE_ANDROID
// Function to create a ManifestDataset.
std::shared_ptr<ManifestDataset> Manifest(const std::string &dataset_file, const std::string &usage,
const std::shared_ptr<SamplerObj> &sampler,
@ -418,7 +424,6 @@ std::shared_ptr<MindDataDataset> MindData(const std::vector<std::string> &datase
return ds;
}
#endif
// Function to create a MnistDataset.
std::shared_ptr<MnistDataset> Mnist(const std::string &dataset_dir, const std::string &usage,
@ -428,7 +433,6 @@ std::shared_ptr<MnistDataset> Mnist(const std::string &dataset_dir, const std::s
return ds;
}
// Function to overload "+" operator to concat two datasets
std::shared_ptr<ConcatDataset> operator+(const std::shared_ptr<Dataset> &datasets1,
const std::shared_ptr<Dataset> &datasets2) {
@ -444,7 +448,6 @@ std::shared_ptr<TextFileDataset> TextFile(const std::vector<std::string> &datase
return ds;
}
#ifndef ENABLE_ANDROID
// Function to create a VOCDataset.
std::shared_ptr<VOCDataset> VOC(const std::string &dataset_dir, const std::string &task, const std::string &usage,
const std::map<std::string, int32_t> &class_indexing, bool decode,
@ -454,14 +457,13 @@ std::shared_ptr<VOCDataset> VOC(const std::string &dataset_dir, const std::strin
return ds;
}
#endif
// Function to create a ZipDataset.
// \param[in] datasets List of datasets to be zipped together.
// \return Shared pointer to the created ZipDataset.
std::shared_ptr<ZipDataset> Zip(const std::vector<std::shared_ptr<Dataset>> &datasets) {
auto ds = std::make_shared<ZipDataset>(datasets);
return ds;
}
#endif
// FUNCTIONS TO CREATE DATASETS FOR DATASET OPS
// (In alphabetical order)
@ -487,8 +489,6 @@ BucketBatchByLengthDataset::BucketBatchByLengthDataset(
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#endif
ConcatDataset::ConcatDataset(const std::vector<std::shared_ptr<Dataset>> &datasets) {
std::vector<std::shared_ptr<DatasetNode>> all_datasets;
(void)std::transform(datasets.begin(), datasets.end(), std::back_inserter(all_datasets),
@ -501,7 +501,6 @@ ConcatDataset::ConcatDataset(const std::vector<std::shared_ptr<Dataset>> &datase
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#ifndef ENABLE_ANDROID
FilterDataset::FilterDataset(std::shared_ptr<Dataset> input, std::function<TensorRow(TensorRow)> predicate,
std::vector<std::string> input_columns) {
auto ds = std::make_shared<FilterNode>(input->IRNode(), predicate, input_columns);
@ -525,13 +524,14 @@ ProjectDataset::ProjectDataset(std::shared_ptr<Dataset> input, const std::vector
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#ifndef ENABLE_ANDROID
// Constructor: builds a RenameNode IR node that renames input_columns to
// output_columns on top of the input dataset's IR node.
RenameDataset::RenameDataset(std::shared_ptr<Dataset> input, const std::vector<std::string> &input_columns,
const std::vector<std::string> &output_columns) {
auto ds = std::make_shared<RenameNode>(input->IRNode(), input_columns, output_columns);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#endif
RepeatDataset::RepeatDataset(std::shared_ptr<Dataset> input, int32_t count) {
// Workaround for repeat == 1, do not inject repeat.
@ -552,6 +552,7 @@ ShuffleDataset::ShuffleDataset(std::shared_ptr<Dataset> input, int32_t buffer_si
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#ifndef ENABLE_ANDROID
SkipDataset::SkipDataset(std::shared_ptr<Dataset> input, int32_t count) {
auto ds = std::make_shared<SkipNode>(input->IRNode(), count);
@ -581,7 +582,7 @@ ZipDataset::ZipDataset(const std::vector<std::shared_ptr<Dataset>> &datasets) {
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#endif
int64_t Dataset::GetBatchSize() {
int64_t batch_size;
auto ds = shared_from_this();
@ -691,8 +692,8 @@ std::shared_ptr<Vocab> Dataset::BuildVocab(const std::vector<std::string> &colum
}
return vocab;
}
#endif
std::shared_ptr<BatchDataset> Dataset::Batch(int32_t batch_size, bool drop_remainder) {
return std::make_shared<BatchDataset>(shared_from_this(), batch_size, drop_remainder);
}
@ -877,6 +878,8 @@ AlbumDataset::AlbumDataset(const std::string &dataset_dir, const std::string &da
auto ds = std::make_shared<AlbumNode>(dataset_dir, data_schema, column_names, decode, sampler, cache);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#ifndef ENABLE_ANDROID
CelebADataset::CelebADataset(const std::string &dataset_dir, const std::string &usage,
const std::shared_ptr<SamplerObj> &sampler, bool decode,
const std::set<std::string> &extensions, const std::shared_ptr<DatasetCache> &cache) {
@ -928,7 +931,6 @@ ImageFolderDataset::ImageFolderDataset(const std::string &dataset_dir, bool deco
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#ifndef ENABLE_ANDROID
ManifestDataset::ManifestDataset(const std::string &dataset_file, const std::string &usage,
const std::shared_ptr<SamplerObj> &sampler,
const std::map<std::string, int32_t> &class_indexing, bool decode,
@ -949,7 +951,7 @@ MindDataDataset::MindDataDataset(const std::vector<std::string> &dataset_files,
auto ds = std::make_shared<MindDataNode>(dataset_files, columns_list, sampler, padded_sample, num_padded);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#endif
MnistDataset::MnistDataset(const std::string &dataset_dir, const std::string &usage,
const std::shared_ptr<SamplerObj> &sampler, const std::shared_ptr<DatasetCache> &cache) {
auto ds = std::make_shared<MnistNode>(dataset_dir, usage, sampler, cache);
@ -961,14 +963,14 @@ TextFileDataset::TextFileDataset(const std::vector<std::string> &dataset_files,
auto ds = std::make_shared<TextFileNode>(dataset_files, num_samples, shuffle, num_shards, shard_id, cache);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#ifndef ENABLE_ANDROID
// Constructor: wraps a VOCNode IR leaf node built from the given parameters
// (directory, task, usage, class index map, decode flag, sampler, cache).
VOCDataset::VOCDataset(const std::string &dataset_dir, const std::string &task, const std::string &usage,
const std::map<std::string, int32_t> &class_indexing, bool decode,
const std::shared_ptr<SamplerObj> &sampler, const std::shared_ptr<DatasetCache> &cache) {
auto ds = std::make_shared<VOCNode>(dataset_dir, task, usage, class_indexing, decode, sampler, cache);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#endif
RandomDataDataset::RandomDataDataset(const int32_t &total_rows, std::shared_ptr<SchemaObj> schema,
const std::vector<std::string> &columns_list,
std::shared_ptr<DatasetCache> cache) {
@ -981,7 +983,7 @@ RandomDataDataset::RandomDataDataset(const int32_t &total_rows, std::string sche
auto ds = std::make_shared<RandomNode>(total_rows, std::move(schema_path), std::move(columns_list), cache);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#ifndef ENABLE_ANDROID
TFRecordDataset::TFRecordDataset(const std::vector<std::string> &dataset_files, std::string schema,
const std::vector<std::string> &columns_list, int64_t num_samples, ShuffleMode shuffle,
int32_t num_shards, int32_t shard_id, bool shard_equal_rows,

View File

@ -27,13 +27,13 @@ namespace dataset {
// Constructor: seeds the finder with the given node as the default injection point.
EpochInjectionPass::InjectionFinder::InjectionFinder(std::shared_ptr<DatasetOp> node) : injection_point_(node) {}
#ifndef ENABLE_ANDROID
// Performs finder work for BuildVocabOp that has special rules about epoch control injection.
// Clearing injection_point_ signals the pass that no epoch-control op should be
// injected for this subtree — presumably because vocab building is not an
// epoch-by-epoch consumer; confirm against EpochInjectionPass docs.
Status EpochInjectionPass::InjectionFinder::PreRunOnNode(std::shared_ptr<BuildVocabOp> node, bool *modified) {
injection_point_ = nullptr;
return Status::OK();
}
#ifndef ENABLE_ANDROID
// Performs finder work for BuildSentencePieceVocabOp that has special rules about epoch control injection
Status EpochInjectionPass::InjectionFinder::PreRunOnNode(std::shared_ptr<BuildSentencePieceVocabOp> node,
bool *modified) {

View File

@ -42,13 +42,13 @@ class EpochInjectionPass : public TreePass {
/// \brief Destructor
~InjectionFinder() = default;
#ifndef ENABLE_ANDROID
/// \brief Performs finder work for BuildVocabOp that has special rules about epoch control injection.
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \return Status The error code return
Status PreRunOnNode(std::shared_ptr<BuildVocabOp> node, bool *modified) override;
#ifndef ENABLE_ANDROID
/// \brief Performs finder work for BuildSentencePieceVocabOp that has special rules about epoch control injection.
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all

View File

@ -65,21 +65,25 @@ class CsvBase;
// Dataset classes (in alphabetical order)
class BatchDataset;
class MapDataset;
class ProjectDataset;
class ShuffleDataset;
#ifndef ENABLE_ANDROID
class BucketBatchByLengthDataset;
class FilterDataset;
#endif
class CSVDataset;
class TransferDataset;
class ConcatDataset;
class MapDataset;
class ProjectDataset;
class RenameDataset;
#endif
class RepeatDataset;
class ShuffleDataset;
#ifndef ENABLE_ANDROID
class SkipDataset;
class TakeDataset;
class TransferDataset;
class ZipDataset;
#endif
/// \class Dataset datasets.h
/// \brief A base class to represent a dataset in the data pipeline.
@ -133,6 +137,7 @@ class Dataset : public std::enable_shared_from_this<Dataset> {
/// \return Shared pointer to the Iterator
std::shared_ptr<Iterator> CreateIterator(std::vector<std::string> columns = {});
#ifndef ENABLE_ANDROID
/// \brief Function to transfer data through a device.
/// \notes If device is Ascend, features of data will be transferred one by one. The limitation
/// of data transmission per time is 256M.
@ -140,7 +145,6 @@ class Dataset : public std::enable_shared_from_this<Dataset> {
/// \return Returns true if no error encountered else false.
bool DeviceQueue(bool send_epoch_end = true);
#ifndef ENABLE_ANDROID
/// \brief Function to create a Saver to save the dynamic data processed by the dataset pipeline
/// \note Usage restrictions:
/// 1. Supported dataset formats: 'mindrecord' only
@ -235,7 +239,6 @@ class Dataset : public std::enable_shared_from_this<Dataset> {
const std::pair<int64_t, int64_t> &freq_range = {0, kDeMaxFreq},
int64_t top_k = kDeMaxTopk, const std::vector<std::string> &special_tokens = {},
bool special_first = true);
#endif
/// \brief Function to create a ConcatDataset
/// \notes Concat the datasets in the input
@ -247,7 +250,6 @@ class Dataset : public std::enable_shared_from_this<Dataset> {
return std::make_shared<ConcatDataset>(all_datasets);
}
#ifndef ENABLE_ANDROID
/// \brief Function to filter dataset by predicate
/// \notes If input_columns is not provided or empty, all columns will be used
/// \param[in] predicate Function callable which returns a boolean value. If false then filter the element
@ -294,6 +296,7 @@ class Dataset : public std::enable_shared_from_this<Dataset> {
return std::make_shared<ProjectDataset>(shared_from_this(), columns);
}
#ifndef ENABLE_ANDROID
/// \brief Function to create a Rename Dataset
/// \notes Renames the columns in the input dataset
/// \param[in] input_columns List of the input columns to rename
@ -303,7 +306,7 @@ class Dataset : public std::enable_shared_from_this<Dataset> {
const std::vector<std::string> &output_columns) {
return std::make_shared<RenameDataset>(shared_from_this(), input_columns, output_columns);
}
#endif
/// \brief Function to create a RepeatDataset
/// \notes Repeats this dataset count times. Repeat indefinitely if count is -1
/// \param[in] count Number of times the dataset should be repeated
@ -313,7 +316,7 @@ class Dataset : public std::enable_shared_from_this<Dataset> {
std::shared_ptr<RepeatDataset> Repeat(int32_t count = -1) {
return std::make_shared<RepeatDataset>(shared_from_this(), count);
}
#ifndef ENABLE_ANDROID
/// \brief Function to create a Shuffle Dataset
/// \notes Randomly shuffles the rows of this dataset
/// \param[in] buffer_size The size of the buffer (must be larger than 1) for shuffling
@ -345,6 +348,7 @@ class Dataset : public std::enable_shared_from_this<Dataset> {
all_datasets.push_back(shared_from_this());
return std::make_shared<ZipDataset>(all_datasets);
}
#endif
std::shared_ptr<DatasetNode> IRNode() { return ir_node_; }
@ -429,14 +433,11 @@ class BucketBatchByLengthDataset : public Dataset {
bool pad_to_bucket_boundary = false, bool drop_remainder = false);
};
#endif
// Dataset produced by Concat() / operator+ : concatenates the input datasets.
class ConcatDataset : public Dataset {
public:
// \param[in] input List of datasets to be concatenated, in order.
explicit ConcatDataset(const std::vector<std::shared_ptr<Dataset>> &input);
};
#ifndef ENABLE_ANDROID
class FilterDataset : public Dataset {
public:
FilterDataset(std::shared_ptr<Dataset> input, std::function<TensorRow(TensorRow)> predicate,
@ -457,11 +458,13 @@ class ProjectDataset : public Dataset {
ProjectDataset(std::shared_ptr<Dataset> input, const std::vector<std::string> &columns);
};
#ifndef ENABLE_ANDROID
// Dataset produced by Rename(): renames input_columns to output_columns.
class RenameDataset : public Dataset {
public:
// \param[in] input Dataset whose columns are renamed
// \param[in] input_columns Existing column names
// \param[in] output_columns New column names (positionally paired with input_columns)
RenameDataset(std::shared_ptr<Dataset> input, const std::vector<std::string> &input_columns,
const std::vector<std::string> &output_columns);
};
#endif
class RepeatDataset : public Dataset {
public:
@ -473,6 +476,7 @@ class ShuffleDataset : public Dataset {
ShuffleDataset(std::shared_ptr<Dataset> input, int32_t buffer_size);
};
#ifndef ENABLE_ANDROID
class SkipDataset : public Dataset {
public:
SkipDataset(std::shared_ptr<Dataset> input, int32_t count);
@ -487,7 +491,7 @@ class ZipDataset : public Dataset {
public:
explicit ZipDataset(const std::vector<std::shared_ptr<Dataset>> &inputs);
};
#endif
/// \brief Function to create a SchemaObj
/// \param[in] schema_file Path of schema file
/// \return Shared pointer to the current schema
@ -518,6 +522,7 @@ std::shared_ptr<AlbumDataset> Album(const std::string &dataset_dir, const std::s
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
const std::shared_ptr<DatasetCache> &cache = nullptr);
#ifndef ENABLE_ANDROID
class CelebADataset : public Dataset {
public:
explicit CelebADataset(const std::string &dataset_dir, const std::string &usage = "all",
@ -710,7 +715,6 @@ std::shared_ptr<ImageFolderDataset> ImageFolder(const std::string &dataset_dir,
const std::map<std::string, int32_t> &class_indexing = {},
const std::shared_ptr<DatasetCache> &cache = nullptr);
#ifndef ENABLE_ANDROID
class ManifestDataset : public Dataset {
public:
explicit ManifestDataset(const std::string &dataset_file, const std::string &usage = "train",
@ -735,9 +739,7 @@ std::shared_ptr<ManifestDataset> Manifest(const std::string &dataset_file, const
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
const std::map<std::string, int32_t> &class_indexing = {},
bool decode = false, const std::shared_ptr<DatasetCache> &cache = nullptr);
#endif
#ifndef ENABLE_ANDROID
class MindDataDataset : public Dataset {
public:
explicit MindDataDataset(const std::string &dataset_file, const std::vector<std::string> &columns_list = {},
@ -777,7 +779,6 @@ std::shared_ptr<MindDataDataset> MindData(const std::vector<std::string> &datase
const std::vector<std::string> &columns_list = {},
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
nlohmann::json padded_sample = nullptr, int64_t num_padded = 0);
#endif
class MnistDataset : public Dataset {
public:
@ -867,7 +868,6 @@ std::shared_ptr<TextFileDataset> TextFile(const std::vector<std::string> &datase
ShuffleMode shuffle = ShuffleMode::kGlobal, int32_t num_shards = 1,
int32_t shard_id = 0, const std::shared_ptr<DatasetCache> &cache = nullptr);
#ifndef ENABLE_ANDROID
class TFRecordDataset : public Dataset {
public:
TFRecordDataset(const std::vector<std::string> &dataset_files, std::string schema,
@ -974,13 +974,13 @@ std::shared_ptr<DatasetCache> CreateDatasetCache(session_id_type id, uint64_t me
std::optional<int32_t> port = std::nullopt,
std::optional<int32_t> num_connections = std::nullopt,
std::optional<int32_t> prefetch_sz = std::nullopt);
#endif
/// \brief Function to create a ZipDataset
/// \notes Applies zip to the dataset
/// \param[in] datasets List of shared pointers to the datasets that we want to zip
/// \return Shared pointer to the current Dataset
std::shared_ptr<ZipDataset> Zip(const std::vector<std::shared_ptr<Dataset>> &datasets);
#endif
} // namespace dataset
} // namespace mindspore

View File

@ -752,7 +752,7 @@ bool Merge(const std::vector<LiteMat> &mv, LiteMat &dst) {
bool Pad(const LiteMat &src, LiteMat &dst, int top, int bottom, int left, int right, PaddBorderType pad_type,
uint8_t fill_b_or_gray, uint8_t fill_g, uint8_t fill_r) {
if (top <= 0 || bottom <= 0 || left <= 0 || right <= 0) {
if (top < 0 || bottom <= 0 || left < 0 || right < 0) {
return false;
}
if (src.IsEmpty()) {

View File

@ -59,6 +59,20 @@ LiteMat::LiteMat(int width, int height, LDataType data_type) {
Init(width, height, data_type);
}
// Constructor that wraps an existing external buffer (p_data) as a 2-D matrix
// without copying it. All members are zero-initialized first so that Init()
// starts from a clean, well-defined state.
LiteMat::LiteMat(int width, int height, void *p_data, LDataType data_type) {
data_ptr_ = 0;
elem_size_ = 0;
width_ = 0;
height_ = 0;
channel_ = 0;
c_step_ = 0;
dims_ = 0;
data_type_ = LDataType::UINT8;
ref_count_ = 0;
size_ = 0;
// Delegate the actual setup (dims, element size, ref counting) to Init().
Init(width, height, p_data, data_type);
}
LiteMat::LiteMat(int width, int height, int channel, LDataType data_type) {
data_ptr_ = 0;
elem_size_ = 0;
@ -74,17 +88,17 @@ LiteMat::LiteMat(int width, int height, int channel, LDataType data_type) {
}
LiteMat::LiteMat(int width, int height, int channel, void *p_data, LDataType data_type) {
data_type_ = data_type;
InitElemSize(data_type);
width_ = width;
height_ = height;
dims_ = 3;
channel_ = channel;
c_step_ = height_ * width_;
size_ = c_step_ * channel_ * elem_size_;
data_ptr_ = p_data;
ref_count_ = new int[1];
*ref_count_ = 0;
data_ptr_ = 0;
elem_size_ = 0;
width_ = 0;
height_ = 0;
channel_ = 0;
c_step_ = 0;
dims_ = 0;
data_type_ = LDataType::UINT8;
ref_count_ = 0;
size_ = 0;
Init(width, height, channel, p_data, data_type);
}
LiteMat::~LiteMat() { Release(); }
@ -153,7 +167,6 @@ void LiteMat::Init(int width, int height, LDataType data_type) {
Release();
data_type_ = data_type;
InitElemSize(data_type);
width_ = width;
height_ = height;
dims_ = 2;
@ -165,6 +178,20 @@ void LiteMat::Init(int width, int height, LDataType data_type) {
*ref_count_ = 1;
}
// Initialize a 2-D, single-channel LiteMat over an external buffer p_data.
// The buffer is adopted as-is: no copy and no alignment adjustment is done.
// NOTE(review): unlike Init(width, height, data_type), this overload does not
// call Release() first — re-initializing a non-empty LiteMat may leak the old
// buffer; confirm this is intended.
void LiteMat::Init(int width, int height, void *p_data, LDataType data_type) {
data_type_ = data_type;
InitElemSize(data_type);  // presumably derives elem_size_ from data_type — confirm
width_ = width;
height_ = height;
dims_ = 2;
channel_ = 1;
c_step_ = height_ * width_;
size_ = c_step_ * channel_ * elem_size_;
data_ptr_ = p_data;
// Ref count starts at 0 (the owning Init overloads set it to 1) — presumably
// this marks the buffer as not owned so Release() won't free it; verify.
ref_count_ = new int[1];
*ref_count_ = 0;
}
void LiteMat::Init(int width, int height, int channel, LDataType data_type) {
Release();
data_type_ = data_type;
@ -175,13 +202,25 @@ void LiteMat::Init(int width, int height, int channel, LDataType data_type) {
channel_ = channel;
c_step_ = ((height_ * width_ * elem_size_ + ALIGN - 1) & (-ALIGN)) / elem_size_;
size_ = c_step_ * channel_ * elem_size_;
data_ptr_ = AlignMalloc(size_);
ref_count_ = new int[1];
*ref_count_ = 1;
}
// Initialize a 3-D (width x height x channel) LiteMat over an external buffer
// p_data. The buffer is adopted as-is: no copy, and unlike the owning 3-D Init,
// c_step_ is not ALIGN-padded here, so the layout is tightly packed.
// NOTE(review): this overload does not call Release() first — re-initializing a
// non-empty LiteMat may leak the old buffer; confirm this is intended.
void LiteMat::Init(int width, int height, int channel, void *p_data, LDataType data_type) {
data_type_ = data_type;
InitElemSize(data_type);  // presumably derives elem_size_ from data_type — confirm
width_ = width;
height_ = height;
dims_ = 3;
channel_ = channel;
c_step_ = height_ * width_;
size_ = c_step_ * channel_ * elem_size_;
data_ptr_ = p_data;
// Ref count starts at 0 (owning Init overloads use 1) — presumably marks the
// buffer as not owned so Release() won't free it; verify against Release().
ref_count_ = new int[1];
*ref_count_ = 0;
}
bool LiteMat::IsEmpty() const { return data_ptr_ == 0 || data_ptr_ == nullptr || c_step_ * channel_ == 0; }
void LiteMat::Release() {

View File

@ -193,6 +193,8 @@ class LiteMat {
LiteMat(int width, int height, LDataType data_type = LDataType::UINT8);
LiteMat(int width, int height, void *p_data, LDataType data_type = LDataType::UINT8);
LiteMat(int width, int height, int channel, LDataType data_type = LDataType::UINT8);
LiteMat(int width, int height, int channel, void *p_data, LDataType data_type = LDataType::UINT8);
@ -205,8 +207,12 @@ class LiteMat {
void Init(int width, int height, LDataType data_type = LDataType::UINT8);
void Init(int width, int height, void *p_data, LDataType data_type = LDataType::UINT8);
void Init(int width, int height, int channel, LDataType data_type = LDataType::UINT8);
void Init(int width, int height, int channel, void *p_data, LDataType data_type = LDataType::UINT8);
bool IsEmpty() const;
void Release();

View File

@ -217,6 +217,16 @@ Status JpegCropAndDecode(const std::shared_ptr<Tensor> &input, std::shared_ptr<T
return Status::OK();
}
// Translate a MindData tensor DataType into the corresponding LiteCV element
// type. Only DE_UINT8 and DE_FLOAT32 have LiteCV equivalents; every other
// input maps to LDataType::UNKNOWN.
static LDataType GetLiteCVDataType(DataType data_type) {
  if (data_type == DataType::DE_UINT8) return LDataType::UINT8;
  if (data_type == DataType::DE_FLOAT32) return LDataType::FLOAT32;
  return LDataType::UNKNOWN;
}
Status Decode(const std::shared_ptr<Tensor> &input, std::shared_ptr<Tensor> *output) {
if (IsNonEmptyJPEG(input)) {
return JpegCropAndDecode(input, output);
@ -230,7 +240,7 @@ Status Crop(const std::shared_ptr<Tensor> &input, std::shared_ptr<Tensor> *outpu
RETURN_STATUS_UNEXPECTED("Shape not <H,W,C> or <H,W>");
}
if (input->type() != DataType::DE_FLOAT32 || input->type() != DataType::DE_UINT8) {
if (input->type() != DataType::DE_FLOAT32 && input->type() != DataType::DE_UINT8) {
RETURN_STATUS_UNEXPECTED("Only float32, uint8 support in Crop");
}
@ -243,16 +253,22 @@ Status Crop(const std::shared_ptr<Tensor> &input, std::shared_ptr<Tensor> *outpu
RETURN_STATUS_UNEXPECTED("Invalid x coordinate value for crop");
}
LiteMat lite_mat_rgb(input->shape()[1], input->shape()[0], 3,
const_cast<void *>(reinterpret_cast<const void *>(input->GetBuffer())), LDataType::UINT8);
try {
LiteMat lite_mat_rgb;
TensorShape shape{h, w};
int num_channels = input->shape()[2];
if (input->Rank() == 3) shape = shape.AppendDim(num_channels);
if (input->Rank() == 2) {
lite_mat_rgb.Init(input->shape()[1], input->shape()[0],
const_cast<void *>(reinterpret_cast<const void *>(input->GetBuffer())),
GetLiteCVDataType(input->type()));
} else { // rank == 3
lite_mat_rgb.Init(input->shape()[1], input->shape()[0], input->shape()[2],
const_cast<void *>(reinterpret_cast<const void *>(input->GetBuffer())),
GetLiteCVDataType(input->type()));
int num_channels = input->shape()[2];
shape = shape.AppendDim(num_channels);
}
LiteMat lite_mat_cut;
bool ret = Crop(lite_mat_rgb, lite_mat_cut, x, y, x + w, y + h);
bool ret = Crop(lite_mat_rgb, lite_mat_cut, x, y, w, h);
CHECK_FAIL_RETURN_UNEXPECTED(ret, "Crop failed in lite cv");
// create output Tensor based off of lite_mat_cut
std::shared_ptr<Tensor> output_tensor;
@ -292,18 +308,10 @@ Status Normalize(const std::shared_ptr<Tensor> &input, std::shared_ptr<Tensor> *
RETURN_STATUS_UNEXPECTED("Input tensor rank isn't 3");
}
if (input->type() != DataType::DE_UINT8) {
RETURN_STATUS_UNEXPECTED("Only uint8 support in Normalize");
if (input->type() != DataType::DE_UINT8 && input->type() != DataType::DE_FLOAT32) {
RETURN_STATUS_UNEXPECTED("Only uint8, float32 support in Normalize");
}
LiteMat lite_mat_rgb(input->shape()[1], input->shape()[0], 3,
const_cast<void *>(reinterpret_cast<const void *>(input->GetBuffer())), LDataType::UINT8);
LiteMat lite_mat_float;
// change input to float
bool ret = ConvertTo(lite_mat_rgb, lite_mat_float, 1.0);
CHECK_FAIL_RETURN_UNEXPECTED(ret, "Conversion of lite cv to float failed");
mean->Squeeze();
if (mean->type() != DataType::DE_FLOAT32 || mean->Rank() != 1 || mean->shape()[0] != 3) {
std::string err_msg = "Mean tensor should be of size 3 and type float.";
@ -325,9 +333,24 @@ Status Normalize(const std::shared_ptr<Tensor> &input, std::shared_ptr<Tensor> *
vec_mean.push_back(mean_c);
vec_std.push_back(std_c);
}
LiteMat lite_mat_norm;
ret = SubStractMeanNormalize(lite_mat_float, lite_mat_norm, vec_mean, vec_std);
bool ret = false;
LiteMat lite_mat_rgb(input->shape()[1], input->shape()[0], input->shape()[2],
const_cast<void *>(reinterpret_cast<const void *>(input->GetBuffer())),
GetLiteCVDataType(input->type()));
if (input->type() == DataType::DE_UINT8) {
LiteMat lite_mat_float;
// change input to float
ret = ConvertTo(lite_mat_rgb, lite_mat_float, 1.0);
CHECK_FAIL_RETURN_UNEXPECTED(ret, "Conversion of lite cv to float failed");
ret = SubStractMeanNormalize(lite_mat_float, lite_mat_norm, vec_mean, vec_std);
} else { // float32
ret = SubStractMeanNormalize(lite_mat_rgb, lite_mat_norm, vec_mean, vec_std);
}
CHECK_FAIL_RETURN_UNEXPECTED(ret, "Normalize in lite cv failed");
// create output Tensor based off of lite_mat_cut
std::shared_ptr<Tensor> output_tensor;
RETURN_IF_NOT_OK(Tensor::CreateFromMemory(input->shape(), DataType(DataType::DE_FLOAT32),
@ -341,8 +364,8 @@ Status Normalize(const std::shared_ptr<Tensor> &input, std::shared_ptr<Tensor> *
Status Resize(const std::shared_ptr<Tensor> &input, std::shared_ptr<Tensor> *output, int32_t output_height,
int32_t output_width, double fx, double fy, InterpolationMode mode) {
if (input->Rank() != 3) {
RETURN_STATUS_UNEXPECTED("Input Tensor is not in shape of <H,W,C>");
if (input->Rank() != 3 && input->Rank() != 2) {
RETURN_STATUS_UNEXPECTED("Input Tensor is not in shape of <H,W,C> or <H,W>");
}
if (input->type() != DataType::DE_UINT8) {
RETURN_STATUS_UNEXPECTED("Only uint8 support in Resize");
@ -355,13 +378,20 @@ Status Resize(const std::shared_ptr<Tensor> &input, std::shared_ptr<Tensor> *out
"1000 times the original image; 2) can not be 0.";
return Status(StatusCode::kShapeMisMatch, err_msg);
}
LiteMat lite_mat_rgb(input->shape()[1], input->shape()[0], 3,
const_cast<void *>(reinterpret_cast<const void *>(input->GetBuffer())), LDataType::UINT8);
try {
LiteMat lite_mat_rgb;
TensorShape shape{output_height, output_width};
int num_channels = input->shape()[2];
if (input->Rank() == 3) shape = shape.AppendDim(num_channels);
if (input->Rank() == 2) {
lite_mat_rgb.Init(input->shape()[1], input->shape()[0],
const_cast<void *>(reinterpret_cast<const void *>(input->GetBuffer())),
GetLiteCVDataType(input->type()));
} else { // rank == 3
lite_mat_rgb.Init(input->shape()[1], input->shape()[0], input->shape()[2],
const_cast<void *>(reinterpret_cast<const void *>(input->GetBuffer())),
GetLiteCVDataType(input->type()));
int num_channels = input->shape()[2];
shape = shape.AppendDim(num_channels);
}
LiteMat lite_mat_resize;
bool ret = ResizeBilinear(lite_mat_rgb, lite_mat_resize, output_width, output_height);
@ -383,25 +413,25 @@ Status Pad(const std::shared_ptr<Tensor> &input, std::shared_ptr<Tensor> *output
RETURN_STATUS_UNEXPECTED("Input Tensor is not in shape of <H,W,C>");
}
if (input->type() != DataType::DE_FLOAT32 || input->type() != DataType::DE_UINT8) {
if (input->type() != DataType::DE_FLOAT32 && input->type() != DataType::DE_UINT8) {
RETURN_STATUS_UNEXPECTED("Only float32, uint8 support in Pad");
}
if (pad_top <= 0 || pad_bottom <= 0 || pad_left <= 0 || pad_right <= 0) {
if (pad_top < 0 || pad_bottom < 0 || pad_left < 0 || pad_right < 0) {
RETURN_STATUS_UNEXPECTED("The pad, top, bottom, left, right must be greater than 0");
}
try {
LiteMat lite_mat_rgb(input->shape()[1], input->shape()[0], 3,
const_cast<void *>(reinterpret_cast<const void *>(input->GetBuffer())), LDataType::UINT8);
LiteMat lite_mat_rgb(input->shape()[1], input->shape()[0], input->shape()[2],
const_cast<void *>(reinterpret_cast<const void *>(input->GetBuffer())),
GetLiteCVDataType(input->type()));
LiteMat lite_mat_pad;
bool ret = Pad(lite_mat_rgb, lite_mat_pad, pad_top, pad_bottom, pad_left, pad_right,
PaddBorderType::PADD_BORDER_CONSTANT, fill_r, fill_g, fill_b);
CHECK_FAIL_RETURN_UNEXPECTED(ret, "Pad failed in lite cv");
std::shared_ptr<Tensor> output_tensor;
RETURN_IF_NOT_OK(Tensor::CreateFromMemory(input->shape(), DataType(DataType::DE_FLOAT32),
RETURN_IF_NOT_OK(Tensor::CreateFromMemory(input->shape(), input->type(),
static_cast<uchar *>(lite_mat_pad.data_ptr_), &output_tensor));
*output = output_tensor;
} catch (std::runtime_error &e) {

View File

@ -105,6 +105,11 @@ if (BUILD_MINDDATA STREQUAL "full")
"${MINDDATA_DIR}/engine/datasetops/cache_base_op.cc"
"${MINDDATA_DIR}/engine/datasetops/cache_lookup_op.cc"
"${MINDDATA_DIR}/engine/datasetops/cache_op.cc"
"${MINDDATA_DIR}/engine/datasetops/concat_op.cc"
"${MINDDATA_DIR}/engine/datasetops/rename_op.cc"
"${MINDDATA_DIR}/engine/datasetops/skip_op.cc"
"${MINDDATA_DIR}/engine/datasetops/take_op.cc"
"${MINDDATA_DIR}/engine/datasetops/zip_op.cc"
)
list(REMOVE_ITEM MINDDATA_ENGINE_DATASETOPS_SOURCE_SRC_FILES
@ -113,6 +118,16 @@ if (BUILD_MINDDATA STREQUAL "full")
"${MINDDATA_DIR}/engine/datasetops/source/manifest_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/mindrecord_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/tf_reader_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/celeba_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/cifar_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/clue_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/coco_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/csv_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/image_folder_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/mnist_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/random_data_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/text_file_op.cc"
"${MINDDATA_DIR}/engine/datasetops/source/voc_op.cc"
)
list(REMOVE_ITEM MINDDATA_ENGINE_DATASETOPS_SOURCE_SAMPLER_SRC_FILES
@ -138,6 +153,17 @@ if (BUILD_MINDDATA STREQUAL "full")
"${MINDDATA_DIR}/engine/ir/datasetops/source/minddata_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/tf_record_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/voc_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/celeba_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/cifar10_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/cifar100_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/coco_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/csv_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/image_folder_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/manifest_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/mnist_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/random_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/text_file_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/source/clue_node.cc"
)
list(REMOVE_ITEM MINDDATA_KERNELS_IMAGE_SRC_FILES
"${MINDDATA_DIR}/kernels/image/affine_op.cc"
@ -148,7 +174,7 @@ if (BUILD_MINDDATA STREQUAL "full")
"${MINDDATA_DIR}/kernels/image/cut_out_op.cc"
"${MINDDATA_DIR}/kernels/image/cutmix_batch_op.cc"
"${MINDDATA_DIR}/kernels/image/equalize_op.cc"
"${MINDDATA_DIR}/kernels/image/hwc_to_chw_op.cc"
"${MINDDATA_DIR}/kernels/image/hwc_to_chw_op.cc"
"${MINDDATA_DIR}/kernels/image/image_utils.cc"
"${MINDDATA_DIR}/kernels/image/invert_op.cc"
"${MINDDATA_DIR}/kernels/image/math_utils.cc"
@ -160,28 +186,28 @@ if (BUILD_MINDDATA STREQUAL "full")
"${MINDDATA_DIR}/kernels/image/random_crop_and_resize_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_crop_decode_resize_op.cc"
"${MINDDATA_DIR}/kernels/image/random_crop_and_resize_op.cc"
"${MINDDATA_DIR}/kernels/image/random_crop_op.cc"
"${MINDDATA_DIR}/kernels/image/random_crop_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_horizontal_flip_op.cc"
"${MINDDATA_DIR}/kernels/image/random_horizontal_flip_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_posterize_op.cc"
"${MINDDATA_DIR}/kernels/image/random_resize_op.cc"
"${MINDDATA_DIR}/kernels/image/random_rotation_op.cc"
"${MINDDATA_DIR}/kernels/image/random_select_subpolicy_op.cc"
"${MINDDATA_DIR}/kernels/image/random_solarize_op.cc"
"${MINDDATA_DIR}/kernels/image/random_vertical_flip_op.cc"
"${MINDDATA_DIR}/kernels/image/random_vertical_flip_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_sharpness_op.cc"
"${MINDDATA_DIR}/kernels/image/rescale_op.cc"
"${MINDDATA_DIR}/kernels/image/rgba_to_bgr_op.cc"
"${MINDDATA_DIR}/kernels/image/rgba_to_rgb_op.cc"
"${MINDDATA_DIR}/kernels/image/sharpness_op.cc"
"${MINDDATA_DIR}/kernels/image/solarize_op.cc"
"${MINDDATA_DIR}/kernels/image/swap_red_blue_op.cc"
"${MINDDATA_DIR}/kernels/image/uniform_aug_op.cc"
"${MINDDATA_DIR}/kernels/image/resize_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_resize_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_color_op.cc"
"${MINDDATA_DIR}/kernels/image/random_crop_op.cc"
"${MINDDATA_DIR}/kernels/image/random_crop_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_horizontal_flip_op.cc"
"${MINDDATA_DIR}/kernels/image/random_horizontal_flip_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_posterize_op.cc"
"${MINDDATA_DIR}/kernels/image/random_resize_op.cc"
"${MINDDATA_DIR}/kernels/image/random_rotation_op.cc"
"${MINDDATA_DIR}/kernels/image/random_select_subpolicy_op.cc"
"${MINDDATA_DIR}/kernels/image/random_solarize_op.cc"
"${MINDDATA_DIR}/kernels/image/random_vertical_flip_op.cc"
"${MINDDATA_DIR}/kernels/image/random_vertical_flip_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_sharpness_op.cc"
"${MINDDATA_DIR}/kernels/image/rescale_op.cc"
"${MINDDATA_DIR}/kernels/image/rgba_to_bgr_op.cc"
"${MINDDATA_DIR}/kernels/image/rgba_to_rgb_op.cc"
"${MINDDATA_DIR}/kernels/image/sharpness_op.cc"
"${MINDDATA_DIR}/kernels/image/solarize_op.cc"
"${MINDDATA_DIR}/kernels/image/swap_red_blue_op.cc"
"${MINDDATA_DIR}/kernels/image/uniform_aug_op.cc"
"${MINDDATA_DIR}/kernels/image/resize_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_resize_with_bbox_op.cc"
"${MINDDATA_DIR}/kernels/image/random_color_op.cc"
)
list(REMOVE_ITEM MINDDATA_ENGINE_IR_DATASETOPS_SRC_FILES
@ -190,6 +216,12 @@ if (BUILD_MINDDATA STREQUAL "full")
"${MINDDATA_DIR}/engine/ir/datasetops/build_vocab_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/filter_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/sync_wait_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/skip_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/take_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/transfer_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/zip_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/rename_node.cc"
"${MINDDATA_DIR}/engine/ir/datasetops/concat_node.cc"
)
list(REMOVE_ITEM MINDDATA_ENGINE_CONSUMERS_SRC_FILES
"${MINDDATA_DIR}/engine/consumers/python_tree_consumer.cc"
@ -204,9 +236,9 @@ if (BUILD_MINDDATA STREQUAL "full")
)
include_directories("${CMAKE_BINARY_DIR}/minddata/dataset/engine/cache")
if (BUILD_MINDDATA_EXAMPLE AND (PLATFORM_ARM32 OR PLATFORM_ARM64))
set(MINDDATA_EXAMPLE_SRC ${CMAKE_CURRENT_SOURCE_DIR}/example/jni-example.cc)
endif ()
# if (BUILD_MINDDATA_EXAMPLE AND (PLATFORM_ARM32 OR PLATFORM_ARM64))
# set(MINDDATA_EXAMPLE_SRC ${CMAKE_CURRENT_SOURCE_DIR}/example/jni-example.cc)
# endif ()
add_library(minddata-lite SHARED
${MINDDATA_API_SRC_FILES}
@ -242,9 +274,6 @@ if (BUILD_MINDDATA STREQUAL "full")
securec
jpeg-turbo
jpeg
#opencv_core
#opencv_imgcodecs
#opencv_imgproc
mindspore::json
)
@ -260,11 +289,11 @@ if (BUILD_MINDDATA STREQUAL "full")
if (PLATFORM_ARM32 OR PLATFORM_ARM64)
target_link_libraries(minddata-lite log)
elseif (BUILD_MINDDATA_EXAMPLE)
add_executable(mdlite-example ${CMAKE_CURRENT_SOURCE_DIR}/example/x86-example.cc)
target_link_libraries(mdlite-example minddata-lite)
add_custom_command(TARGET mdlite-example POST_BUILD
COMMAND cp -rf ${CMAKE_CURRENT_SOURCE_DIR}/example/testCifar10Data ${CMAKE_BINARY_DIR}/minddata
)
# add_executable(mdlite-example ${CMAKE_CURRENT_SOURCE_DIR}/example/x86-example.cc)
# target_link_libraries(mdlite-example minddata-lite)
# add_custom_command(TARGET mdlite-example POST_BUILD
# COMMAND cp -rf ${CMAKE_CURRENT_SOURCE_DIR}/example/testCifar10Data ${CMAKE_BINARY_DIR}/minddata
# )
endif()
elseif (BUILD_MINDDATA STREQUAL "lite")
list(REMOVE_ITEM MINDDATA_CORE_SRC_FILES "${MINDDATA_DIR}/core/client.cc")