!13365 Fix random rotation input check

From: @ezphlow
mindspore-ci-bot authored 2021-03-17 09:42:37 +08:00, committed by Gitee
commit c0aa5f8132
17 changed files with 87 additions and 44 deletions

View File

@@ -19,16 +19,19 @@
#include <fstream>
#include <unordered_set>
#include <utility>
#include <nlohmann/json.hpp>
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/engine/runtime_context.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/include/samplers.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/util/path.h"
#include "minddata/dataset/util/status.h"
#include "minddata/dataset/include/type_id.h"
#include "minddata/dataset/core/client.h"
#include "minddata/dataset/core/type_id.h"
#include "minddata/dataset/engine/consumers/tree_consumer.h"
#include "minddata/dataset/engine/consumers/pull_based_tree_consumer.h"
#include "minddata/dataset/kernels/c_func_op.h"
#include "minddata/dataset/kernels/tensor_op.h"
@@ -985,54 +988,81 @@ ManifestDataset::ManifestDataset(const std::vector<char> &dataset_file, const st
MindDataDataset::MindDataDataset(const std::vector<char> &dataset_file,
const std::vector<std::vector<char>> &columns_list,
const std::shared_ptr<Sampler> &sampler, nlohmann::json padded_sample,
const std::shared_ptr<Sampler> &sampler, nlohmann::json *padded_sample,
int64_t num_padded) {
auto sampler_obj = sampler ? sampler->Parse() : nullptr;
nlohmann::json sample = nullptr;
if (padded_sample) {
sample = *padded_sample;
}
auto ds = std::make_shared<MindDataNode>(CharToString(dataset_file), VectorCharToString(columns_list), sampler_obj,
padded_sample, num_padded);
sample, num_padded);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
MindDataDataset::MindDataDataset(const std::vector<char> &dataset_file,
const std::vector<std::vector<char>> &columns_list, Sampler *sampler,
nlohmann::json padded_sample, int64_t num_padded) {
nlohmann::json *padded_sample, int64_t num_padded) {
auto sampler_obj = sampler ? sampler->Parse() : nullptr;
nlohmann::json sample = nullptr;
if (padded_sample) {
sample = *padded_sample;
}
auto ds = std::make_shared<MindDataNode>(CharToString(dataset_file), VectorCharToString(columns_list), sampler_obj,
padded_sample, num_padded);
sample, num_padded);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
MindDataDataset::MindDataDataset(const std::vector<char> &dataset_file,
const std::vector<std::vector<char>> &columns_list,
const std::reference_wrapper<Sampler> sampler, nlohmann::json padded_sample,
const std::reference_wrapper<Sampler> sampler, nlohmann::json *padded_sample,
int64_t num_padded) {
auto sampler_obj = sampler.get().Parse();
nlohmann::json sample = nullptr;
if (padded_sample) {
sample = *padded_sample;
}
auto ds = std::make_shared<MindDataNode>(CharToString(dataset_file), VectorCharToString(columns_list), sampler_obj,
padded_sample, num_padded);
sample, num_padded);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
MindDataDataset::MindDataDataset(const std::vector<std::vector<char>> &dataset_files,
const std::vector<std::vector<char>> &columns_list,
const std::shared_ptr<Sampler> &sampler, nlohmann::json padded_sample,
const std::shared_ptr<Sampler> &sampler, nlohmann::json *padded_sample,
int64_t num_padded) {
auto sampler_obj = sampler ? sampler->Parse() : nullptr;
nlohmann::json sample = nullptr;
if (padded_sample) {
sample = *padded_sample;
}
auto ds = std::make_shared<MindDataNode>(VectorCharToString(dataset_files), VectorCharToString(columns_list),
sampler_obj, padded_sample, num_padded);
sampler_obj, sample, num_padded);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
MindDataDataset::MindDataDataset(const std::vector<std::vector<char>> &dataset_files,
const std::vector<std::vector<char>> &columns_list, Sampler *sampler,
nlohmann::json padded_sample, int64_t num_padded) {
nlohmann::json *padded_sample, int64_t num_padded) {
auto sampler_obj = sampler ? sampler->Parse() : nullptr;
nlohmann::json sample = nullptr;
if (padded_sample) {
sample = *padded_sample;
}
auto ds = std::make_shared<MindDataNode>(VectorCharToString(dataset_files), VectorCharToString(columns_list),
sampler_obj, padded_sample, num_padded);
sampler_obj, sample, num_padded);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
MindDataDataset::MindDataDataset(const std::vector<std::vector<char>> &dataset_files,
const std::vector<std::vector<char>> &columns_list,
const std::reference_wrapper<Sampler> sampler, nlohmann::json padded_sample,
const std::reference_wrapper<Sampler> sampler, nlohmann::json *padded_sample,
int64_t num_padded) {
auto sampler_obj = sampler.get().Parse();
nlohmann::json sample = nullptr;
if (padded_sample) {
sample = *padded_sample;
}
auto ds = std::make_shared<MindDataNode>(VectorCharToString(dataset_files), VectorCharToString(columns_list),
sampler_obj, padded_sample, num_padded);
sampler_obj, sample, num_padded);
ir_node_ = std::static_pointer_cast<DatasetNode>(ds);
}
#endif

View File

@@ -22,7 +22,7 @@
#include "minddata/dataset/core/device_tensor.h"
#include "minddata/dataset/core/tensor_row.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/include/type_id.h"
#include "minddata/dataset/core/type_id.h"
#include "minddata/dataset/kernels/ir/tensor_operation.h"
#include "minddata/dataset/kernels/tensor_op.h"
#ifndef ENABLE_ANDROID

View File

@@ -51,6 +51,7 @@
#include "minddata/dataset/core/config_manager.h"
#include "minddata/dataset/core/data_type.h"
#include "minddata/dataset/core/type_id.h"
#include "minddata/dataset/util/path.h"
#include "minddata/dataset/util/random.h"
#include "minddata/dataset/util/services.h"

View File

@@ -17,6 +17,7 @@
#include "minddata/dataset/api/python/pybind_conversion.h"
#include "minddata/dataset/api/python/pybind_register.h"
#include "minddata/dataset/core/type_id.h"
#include "minddata/dataset/include/execute.h"
namespace mindspore {

View File

@@ -19,8 +19,9 @@
#include "minddata/dataset/api/python/pybind_register.h"
#include "minddata/dataset/core/global_context.h"
#include "minddata/dataset/include/constants.h"
#include "minddata/dataset/api/python/pybind_conversion.h"
#include "minddata/dataset/core/type_id.h"
#include "minddata/dataset/include/constants.h"
#include "minddata/dataset/include/datasets.h"
namespace mindspore {

View File

@@ -28,11 +28,12 @@
#include "pybind11/pybind11.h"
#include "pybind11/stl.h"
#include "pybind11/stl_bind.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/samplers.h"
#include "minddata/dataset/api/python/pybind_register.h"
#include "minddata/dataset/core/type_id.h"
#include "minddata/dataset/engine/ir/cache/pre_built_dataset_cache.h"
#include "minddata/dataset/engine/ir/datasetops/source/csv_node.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/samplers.h"
#include "minddata/dataset/kernels/ir/data/transforms_ir.h"
#include "minddata/dataset/kernels/py_func_op.h"
namespace py = pybind11;

View File

@@ -16,8 +16,8 @@
#include "include/api/context.h"
#include "include/api/types.h"
#include "minddata/dataset/include/type_id.h"
#include "minddata/dataset/core/ascend_resource.h"
#include "minddata/dataset/core/type_id.h"
#include "minddata/dataset/kernels/image/image_utils.h"
namespace mindspore {

View File

@@ -18,7 +18,7 @@
#include "minddata/dataset/core/device_tensor.h"
#include "minddata/dataset/include/constants.h"
#include "minddata/dataset/core/data_type.h"
#include "minddata/dataset/include/type_id.h"
#include "minddata/dataset/core/type_id.h"
#include "mindspore/core/ir/dtype/type_id.h"
#include "utils/hashing.h"
#ifndef ENABLE_ANDROID

View File

@@ -24,7 +24,7 @@
#include <utility>
#include <functional>
#include "minddata/dataset/include/type_id.h"
#include "minddata/dataset/core/type_id.h"
#include "utils/ms_utils.h"
#include "minddata/dataset/include/constants.h"

View File

@@ -46,7 +46,7 @@ Status SyncWaitNode::Build(std::vector<std::shared_ptr<DatasetOp>> *const node_o
// Right now barrier should only take num_rows_per_buffer = 1
// The reason for this is because having it otherwise can lead to blocking issues
// See barrier_op.h for more details
int32_t rows_per_buffer = 1;
const int32_t rows_per_buffer = 1;
auto op = std::make_shared<BarrierOp>(rows_per_buffer, connector_que_size_, condition_name_, callback_);
op->set_total_repeats(GetTotalRepeats());
op->set_num_repeats_per_epoch(GetNumRepeatsPerEpoch());

View File

@@ -15,6 +15,7 @@
*/
#include <string>
#include <nlohmann/json.hpp>
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/engine/opt/pre/deep_copy_pass.h"
#include "minddata/dataset/engine/ir/datasetops/root_node.h"

View File

@@ -15,6 +15,7 @@
*/
#include <string>
#include <nlohmann/json.hpp>
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/engine/opt/pre/input_validation_pass.h"

View File

@@ -21,6 +21,7 @@
#include <unistd.h>
#include <algorithm>
#include <functional>
#include <map>
#include <memory>
#include <set>
@@ -36,8 +37,6 @@
#include "minddata/dataset/include/json_fwd.hpp"
#include "minddata/dataset/include/samplers.h"
#include "minddata/dataset/include/text.h"
#include "minddata/dataset/include/type_id.h"
#include "minddata/dataset/engine/consumers/pull_based_tree_consumer.h"
namespace mindspore {
namespace dataset {
@@ -53,7 +52,7 @@ class DatasetCache;
class DatasetNode;
class Iterator;
class PullBasedIteratorConsumer;
class PullBasedIterator;
class TensorOperation;
class SchemaObj;
@@ -1243,21 +1242,21 @@ inline std::shared_ptr<ManifestDataset> Manifest(const std::string &dataset_file
class MindDataDataset : public Dataset {
public:
explicit MindDataDataset(const std::vector<char> &dataset_file, const std::vector<std::vector<char>> &columns_list,
const std::shared_ptr<Sampler> &sampler, nlohmann::json padded_sample, int64_t num_padded);
const std::shared_ptr<Sampler> &sampler, nlohmann::json *padded_sample, int64_t num_padded);
explicit MindDataDataset(const std::vector<char> &dataset_file, const std::vector<std::vector<char>> &columns_list,
Sampler *sampler, nlohmann::json padded_sample, int64_t num_padded);
Sampler *sampler, nlohmann::json *padded_sample, int64_t num_padded);
explicit MindDataDataset(const std::vector<char> &dataset_file, const std::vector<std::vector<char>> &columns_list,
const std::reference_wrapper<Sampler> sampler, nlohmann::json padded_sample,
const std::reference_wrapper<Sampler> sampler, nlohmann::json *padded_sample,
int64_t num_padded);
explicit MindDataDataset(const std::vector<std::vector<char>> &dataset_files,
const std::vector<std::vector<char>> &columns_list, const std::shared_ptr<Sampler> &sampler,
nlohmann::json padded_sample, int64_t num_padded);
nlohmann::json *padded_sample, int64_t num_padded);
explicit MindDataDataset(const std::vector<std::vector<char>> &dataset_files,
const std::vector<std::vector<char>> &columns_list, Sampler *sampler,
nlohmann::json padded_sample, int64_t num_padded);
nlohmann::json *padded_sample, int64_t num_padded);
explicit MindDataDataset(const std::vector<std::vector<char>> &dataset_files,
const std::vector<std::vector<char>> &columns_list,
const std::reference_wrapper<Sampler> sampler, nlohmann::json padded_sample,
const std::reference_wrapper<Sampler> sampler, nlohmann::json *padded_sample,
int64_t num_padded);
~MindDataDataset() = default;
};
@@ -1275,7 +1274,7 @@ class MindDataDataset : public Dataset {
/// \return Shared pointer to the current MindDataDataset
inline std::shared_ptr<MindDataDataset> MindData(
const std::string &dataset_file, const std::vector<std::string> &columns_list = {},
const std::shared_ptr<Sampler> &sampler = std::make_shared<RandomSampler>(), nlohmann::json padded_sample = nullptr,
const std::shared_ptr<Sampler> &sampler = std::make_shared<RandomSampler>(), nlohmann::json *padded_sample = nullptr,
int64_t num_padded = 0) {
return std::make_shared<MindDataDataset>(StringToChar(dataset_file), VectorStringToChar(columns_list), sampler,
padded_sample, num_padded);
@@ -1292,7 +1291,7 @@ inline std::shared_ptr<MindDataDataset> MindData(
/// \return Shared pointer to the current MindDataDataset
inline std::shared_ptr<MindDataDataset> MindData(const std::string &dataset_file,
const std::vector<std::string> &columns_list, Sampler *sampler,
nlohmann::json padded_sample = nullptr, int64_t num_padded = 0) {
nlohmann::json *padded_sample = nullptr, int64_t num_padded = 0) {
return std::make_shared<MindDataDataset>(StringToChar(dataset_file), VectorStringToChar(columns_list), sampler,
padded_sample, num_padded);
}
@@ -1309,7 +1308,7 @@ inline std::shared_ptr<MindDataDataset> MindData(const std::string &dataset_file
inline std::shared_ptr<MindDataDataset> MindData(const std::string &dataset_file,
const std::vector<std::string> &columns_list,
const std::reference_wrapper<Sampler> sampler,
nlohmann::json padded_sample = nullptr, int64_t num_padded = 0) {
nlohmann::json *padded_sample = nullptr, int64_t num_padded = 0) {
return std::make_shared<MindDataDataset>(StringToChar(dataset_file), VectorStringToChar(columns_list), sampler,
padded_sample, num_padded);
}
@@ -1326,7 +1325,7 @@ inline std::shared_ptr<MindDataDataset> MindData(const std::string &dataset_file
/// \return Shared pointer to the current MindDataDataset
inline std::shared_ptr<MindDataDataset> MindData(
const std::vector<std::string> &dataset_files, const std::vector<std::string> &columns_list = {},
const std::shared_ptr<Sampler> &sampler = std::make_shared<RandomSampler>(), nlohmann::json padded_sample = nullptr,
const std::shared_ptr<Sampler> &sampler = std::make_shared<RandomSampler>(), nlohmann::json *padded_sample = nullptr,
int64_t num_padded = 0) {
return std::make_shared<MindDataDataset>(VectorStringToChar(dataset_files), VectorStringToChar(columns_list), sampler,
padded_sample, num_padded);
@@ -1342,7 +1341,7 @@ inline std::shared_ptr<MindDataDataset> MindData(
/// \return Shared pointer to the current MindDataDataset
inline std::shared_ptr<MindDataDataset> MindData(const std::vector<std::string> &dataset_files,
const std::vector<std::string> &columns_list, Sampler *sampler,
nlohmann::json padded_sample = nullptr, int64_t num_padded = 0) {
nlohmann::json *padded_sample = nullptr, int64_t num_padded = 0) {
return std::make_shared<MindDataDataset>(VectorStringToChar(dataset_files), VectorStringToChar(columns_list), sampler,
padded_sample, num_padded);
}
@@ -1358,7 +1357,7 @@ inline std::shared_ptr<MindDataDataset> MindData(const std::vector<std::string>
inline std::shared_ptr<MindDataDataset> MindData(const std::vector<std::string> &dataset_files,
const std::vector<std::string> &columns_list,
const std::reference_wrapper<Sampler> sampler,
nlohmann::json padded_sample = nullptr, int64_t num_padded = 0) {
nlohmann::json *padded_sample = nullptr, int64_t num_padded = 0) {
return std::make_shared<MindDataDataset>(VectorStringToChar(dataset_files), VectorStringToChar(columns_list), sampler,
padded_sample, num_padded);
}

View File

@@ -1114,6 +1114,11 @@ class RandomRotation(ImageTensorOperation):
@check_random_rotation
def __init__(self, degrees, resample=Inter.NEAREST, expand=False, center=None, fill_value=0):
if isinstance(degrees, numbers.Number):
degrees = degrees % 360
if isinstance(degrees, (list, tuple)):
degrees = [degrees[0] % 360, degrees[1] % 360]
self.degrees = degrees
self.resample = resample
self.expand = expand
@@ -1121,6 +1126,8 @@ class RandomRotation(ImageTensorOperation):
self.fill_value = fill_value
def parse(self):
# pylint false positive
# pylint: disable=E1130
degrees = (-self.degrees, self.degrees) if isinstance(self.degrees, numbers.Number) else self.degrees
interpolation = DE_C_INTER_MODE[self.resample]
expand = self.expand
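
For illustration, here is a minimal standalone sketch of the input check added above, combined with the range that parse() later derives from it. The helper names normalize_degrees and to_degree_range are hypothetical, introduced only for this sketch; they are not part of the MindSpore API.

```python
import numbers

def normalize_degrees(degrees):
    # Mirrors the added __init__ check: wrap a scalar or a (min, max) pair into [0, 360).
    if isinstance(degrees, numbers.Number):
        return degrees % 360
    if isinstance(degrees, (list, tuple)):
        return [degrees[0] % 360, degrees[1] % 360]
    return degrees

def to_degree_range(degrees):
    # Mirrors parse(): a single number d is expanded to the symmetric range (-d, d).
    return (-degrees, degrees) if isinstance(degrees, numbers.Number) else degrees

print(to_degree_range(normalize_degrees(450)))         # (-90, 90)
print(to_degree_range(normalize_degrees((370, 400))))  # [10, 40]
```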

View File

@@ -289,7 +289,7 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess7) {
std::string file_path1 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
std::vector<std::string> file_list = {file_path1};
std::shared_ptr<Dataset> ds =
MindData(file_list, {"file_name", "label"}, std::make_shared<SequentialSampler>(), pad, 4);
MindData(file_list, {"file_name", "label"}, std::make_shared<SequentialSampler>(), &pad, 4);
EXPECT_NE(ds, nullptr);
// Create a Skip operation on ds, skip original data in mindrecord and get padded samples
@@ -341,7 +341,7 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess8) {
std::string file_path1 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
std::vector<std::string> file_list = {file_path1};
std::shared_ptr<Dataset> ds =
MindData(file_list, {"file_name", "label"}, std::make_shared<SequentialSampler>(), pad, 4);
MindData(file_list, {"file_name", "label"}, std::make_shared<SequentialSampler>(), &pad, 4);
EXPECT_NE(ds, nullptr);
std::vector<mindspore::dataset::DataType> types = ToDETypes(ds->GetOutputTypes());
@@ -412,13 +412,13 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess9) {
std::string file_path1 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
std::vector<std::string> file_list = {file_path1};
std::shared_ptr<Dataset> ds1 =
MindData(file_list, {"file_name", "label"}, std::make_shared<SequentialSampler>(), pad, 4);
MindData(file_list, {"file_name", "label"}, std::make_shared<SequentialSampler>(), &pad, 4);
EXPECT_NE(ds1, nullptr);
ds1 = ds1->Skip(5);
EXPECT_NE(ds1, nullptr);
std::shared_ptr<Dataset> ds2 =
MindData(file_list, {"file_name", "label"}, std::make_shared<SequentialSampler>(), pad, 4);
MindData(file_list, {"file_name", "label"}, std::make_shared<SequentialSampler>(), &pad, 4);
EXPECT_NE(ds2, nullptr);
ds2 = ds2->Skip(5);
EXPECT_NE(ds2, nullptr);
@@ -588,7 +588,7 @@ TEST_F(MindDataTestPipeline, TestMindDataFail4) {
// Create a MindData Dataset
std::string file_path2 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
std::shared_ptr<Dataset> ds2 = MindData(file_path2, {"label"}, std::make_shared<RandomSampler>(), pad, -2);
std::shared_ptr<Dataset> ds2 = MindData(file_path2, {"label"}, std::make_shared<RandomSampler>(), &pad, -2);
EXPECT_NE(ds2, nullptr);
// Create an iterator over the result of the above dataset
@@ -598,7 +598,7 @@ TEST_F(MindDataTestPipeline, TestMindDataFail4) {
// Create a MindData Dataset
std::string file_path3 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
std::shared_ptr<Dataset> ds3 = MindData(file_path3, {}, std::make_shared<RandomSampler>(), pad, 1);
std::shared_ptr<Dataset> ds3 = MindData(file_path3, {}, std::make_shared<RandomSampler>(), &pad, 1);
EXPECT_NE(ds3, nullptr);
// Create an iterator over the result of the above dataset
@@ -614,7 +614,7 @@ TEST_F(MindDataTestPipeline, TestMindDataFail4) {
// Create a MindData Dataset
std::string file_path4 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
std::shared_ptr<Dataset> ds4 =
MindData(file_path4, {"file_name", "label"}, std::make_shared<RandomSampler>(), pad2, 1);
MindData(file_path4, {"file_name", "label"}, std::make_shared<RandomSampler>(), &pad2, 1);
EXPECT_NE(ds4, nullptr);
// Create an iterator over the result of the above dataset

View File

@@ -20,7 +20,8 @@
#include "include/api/status.h"
#include "include/api/types.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/include/type_id.h"
#include "minddata/dataset/core/type_id.h"
#include "minddata/dataset/core/de_tensor.h"
#include "utils/log_adapter.h"
using mindspore::Status;