forked from mindspore-Ecosystem/mindspore
reorganize headers and tests
remove headers; fix iterator file; fix PR comments; fix new file
This commit is contained in:
parent 64b0feb7a2
commit deb1146474
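The five file diffs below all touch MindData C++ pipeline tests that share one shape: build a dataset node, create an iterator, drain rows, stop. As orientation, here is a minimal sketch of that pattern, assembled only from calls that appear verbatim in the tests below; the test name is illustrative and the sketch is not itself part of the commit:

// Sketch only; the sampler arguments mirror the Cifar10 tests below.
TEST_F(MindDataTestPipeline, PipelinePatternSketch) {
  // Build a dataset node; the factory returns nullptr on invalid parameters.
  std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
  std::shared_ptr<Dataset> ds = Cifar10(folder_path, RandomSampler(false, 5));
  EXPECT_NE(ds, nullptr);

  // Creating the iterator builds the Execution Tree and launches it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Pull rows until the row map comes back empty, then stop the pipeline.
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);
  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    iter->GetNextRow(&row);
  }
  EXPECT_EQ(i, 5);
  iter->Stop();
}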
@@ -13,37 +13,11 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <fstream>
#include <iostream>
#include <memory>
#include <vector>
#include <string>

#include "utils/log_adapter.h"
#include "utils/ms_utils.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "securec.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/status.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/core/constants.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/include/samplers.h"

using namespace mindspore::dataset::api;
using mindspore::MsLogLevel::ERROR;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorShape;
using mindspore::dataset::TensorImpl;
using mindspore::dataset::DataType;
using mindspore::dataset::Status;
using mindspore::dataset::BorderType;
using mindspore::dataset::dsize_t;

class MindDataTestPipeline : public UT::DatasetOpTesting {
 protected:
@@ -83,14 +57,6 @@ TEST_F(MindDataTestPipeline, TestCifar10Dataset) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCifar10DatasetFail1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetFail1.";

  // Create a Cifar10 Dataset
  std::shared_ptr<Dataset> ds = Cifar10("", RandomSampler(false, 10));
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestCifar100Dataset) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar100Dataset.";

@@ -133,3 +99,11 @@ TEST_F(MindDataTestPipeline, TestCifar100DatasetFail1) {
  std::shared_ptr<Dataset> ds = Cifar100("", RandomSampler(false, 10));
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestCifar10DatasetFail1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetFail1.";

  // Create a Cifar10 Dataset
  std::shared_ptr<Dataset> ds = Cifar10("", RandomSampler(false, 10));
  EXPECT_EQ(ds, nullptr);
}
@@ -14,9 +14,9 @@
 * limitations under the License.
 */
#include "common/common.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/core/global_context.h"
#include "minddata/dataset/core/config_manager.h"
#include "minddata/dataset/core/global_context.h"
#include "minddata/dataset/include/datasets.h"

using namespace mindspore::dataset::api;
using mindspore::dataset::ShuffleMode;
@@ -27,76 +27,6 @@ class MindDataTestPipeline : public UT::DatasetOpTesting {
 protected:
};

TEST_F(MindDataTestPipeline, TestCLUEDatasetBasic) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetBasic.";

  // Create a CLUEFile Dataset, with single CLUE file
  std::string clue_file = datasets_root_path_ + "/testCLUE/afqmc/train.json";
  std::string task = "AFQMC";
  std::string usage = "train";
  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 2);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("sentence1"), row.end());
  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["sentence1"];
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 2 samples
  EXPECT_EQ(i, 2);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetDistribution) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetDistribution.";

  // Create a CLUEFile Dataset, with single CLUE file
  std::string clue_file = datasets_root_path_ + "/testCLUE/afqmc/train.json";
  std::string task = "AFQMC";
  std::string usage = "train";
  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 3, 0);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("sentence1"), row.end());
  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["sentence1"];
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 1 sample
  EXPECT_EQ(i, 1);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetAFQMC) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetAFQMC.";

@@ -194,6 +124,41 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetAFQMC) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetBasic) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetBasic.";

  // Create a CLUEFile Dataset, with single CLUE file
  std::string clue_file = datasets_root_path_ + "/testCLUE/afqmc/train.json";
  std::string task = "AFQMC";
  std::string usage = "train";
  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 2);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("sentence1"), row.end());
  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["sentence1"];
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 2 samples
  EXPECT_EQ(i, 2);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetCMNLI) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetCMNLI.";

@@ -284,6 +249,74 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetCSL) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetDistribution) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetDistribution.";

  // Create a CLUEFile Dataset, with single CLUE file
  std::string clue_file = datasets_root_path_ + "/testCLUE/afqmc/train.json";
  std::string task = "AFQMC";
  std::string usage = "train";
  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 3, 0);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("sentence1"), row.end());
  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["sentence1"];
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 1 sample
  EXPECT_EQ(i, 1);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetException) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetException.";
  // Create a CLUE Dataset
  std::string clue_file = datasets_root_path_ + "/testCLUE/wsc/train.json";
  std::string task = "WSC";
  std::string usage = "train";
  std::string invalid_clue_file = "./NotExistFile";

  std::shared_ptr<Dataset> ds0 = CLUE({}, task, usage);
  EXPECT_EQ(ds0, nullptr);

  std::shared_ptr<Dataset> ds1 = CLUE({invalid_clue_file}, task, usage);
  EXPECT_EQ(ds1, nullptr);

  std::shared_ptr<Dataset> ds2 = CLUE({clue_file}, "invalid_task", usage);
  EXPECT_EQ(ds2, nullptr);

  std::shared_ptr<Dataset> ds3 = CLUE({clue_file}, task, "invalid_usage");
  EXPECT_EQ(ds3, nullptr);

  std::shared_ptr<Dataset> ds4 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 2, 2);
  EXPECT_EQ(ds4, nullptr);

  std::shared_ptr<Dataset> ds5 = CLUE({clue_file}, task, usage, -1, ShuffleMode::kGlobal);
  EXPECT_EQ(ds5, nullptr);

  std::shared_ptr<Dataset> ds6 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, -1);
  EXPECT_EQ(ds6, nullptr);

  std::shared_ptr<Dataset> ds7 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 0, -1);
  EXPECT_EQ(ds7, nullptr);
}
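Read together, the ds0-ds7 failure cases pin down the positional parameters that follow the file list in the CLUE factory call. A hedged summary, with parameter names that are editorial rather than taken from datasets.h:

// Inferred from ds0-ds7 above; parameter names are illustrative:
//   CLUE(dataset_files, task, usage, num_samples, shuffle, num_shards, shard_id)
// The Distribution test above uses exactly this form to read shard 0 of 3:
std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 3, 0);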

TEST_F(MindDataTestPipeline, TestCLUEDatasetIFLYTEK) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetIFLYTEK.";

@@ -329,6 +362,129 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetIFLYTEK) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleFiles) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetShuffleFiles.";
  // Test CLUE Dataset with files shuffle, num_parallel_workers=1

  // Set configuration
  uint32_t original_seed = GlobalContext::config_manager()->seed();
  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
  GlobalContext::config_manager()->set_seed(135);
  GlobalContext::config_manager()->set_num_parallel_workers(1);

  // Create a CLUE Dataset, with two text files
  // Note: train.json has 3 rows
  // Note: dev.json has 3 rows
  // Use default of all samples
  // They have the same keywords
  // Set shuffle to files shuffle
  std::string clue_file1 = datasets_root_path_ + "/testCLUE/afqmc/train.json";
  std::string clue_file2 = datasets_root_path_ + "/testCLUE/afqmc/dev.json";
  std::string task = "AFQMC";
  std::string usage = "train";
  std::shared_ptr<Dataset> ds = CLUE({clue_file1, clue_file2}, task, usage, 0, ShuffleMode::kFiles);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset.
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("sentence1"), row.end());
  std::vector<std::string> expected_result = {
    "蚂蚁借呗等额还款能否换成先息后本",
    "蚂蚁花呗说我违约了",
    "帮我看看本月花呗账单结清了没",
    "你有花呗吗",
    "吃饭能用花呗吗",
    "蚂蚁花呗支付金额有什么限制"
  };

  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["sentence1"];
    std::string_view sv;
    text->GetItemAt(&sv, {0});
    std::string ss(sv);
    // Compare against expected result
    EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 3 + 3 = 6 samples
  EXPECT_EQ(i, 6);

  // Manually terminate the pipeline
  iter->Stop();

  // Restore configuration
  GlobalContext::config_manager()->set_seed(original_seed);
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleGlobal) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetShuffleGlobal.";
  // Test CLUE Dataset with global shuffle

  // Set configuration
  uint32_t original_seed = GlobalContext::config_manager()->seed();
  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
  GlobalContext::config_manager()->set_seed(135);
  GlobalContext::config_manager()->set_num_parallel_workers(4);

  // Create a CLUEFile Dataset, with single CLUE file
  std::string clue_file = datasets_root_path_ + "/testCLUE/afqmc/train.json";
  std::string task = "AFQMC";
  std::string usage = "train";
  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("sentence1"), row.end());
  std::vector<std::string> expected_result = {
    "蚂蚁花呗说我违约了",
    "帮我看看本月花呗账单结清了没",
    "蚂蚁借呗等额还款能否换成先息后本"
  };
  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["sentence1"];
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    std::string_view sv;
    text->GetItemAt(&sv, {0});
    std::string ss(sv);
    EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 3 samples
  EXPECT_EQ(i, 3);

  // Manually terminate the pipeline
  iter->Stop();

  // Restore configuration
  GlobalContext::config_manager()->set_seed(original_seed);
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetTNEWS) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetTNEWS.";

@@ -418,159 +574,3 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetWSC) {
  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleGlobal) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetShuffleGlobal.";
  // Test CLUE Dataset with global shuffle

  // Set configuration
  uint32_t original_seed = GlobalContext::config_manager()->seed();
  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
  GlobalContext::config_manager()->set_seed(135);
  GlobalContext::config_manager()->set_num_parallel_workers(4);

  // Create a CLUEFile Dataset, with single CLUE file
  std::string clue_file = datasets_root_path_ + "/testCLUE/afqmc/train.json";
  std::string task = "AFQMC";
  std::string usage = "train";
  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("sentence1"), row.end());
  std::vector<std::string> expected_result = {
    "蚂蚁花呗说我违约了",
    "帮我看看本月花呗账单结清了没",
    "蚂蚁借呗等额还款能否换成先息后本"
  };
  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["sentence1"];
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    std::string_view sv;
    text->GetItemAt(&sv, {0});
    std::string ss(sv);
    EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 3 samples
  EXPECT_EQ(i, 3);

  // Manually terminate the pipeline
  iter->Stop();

  // Restore configuration
  GlobalContext::config_manager()->set_seed(original_seed);
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleFiles) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetShuffleFiles.";
  // Test CLUE Dataset with files shuffle, num_parallel_workers=1

  // Set configuration
  uint32_t original_seed = GlobalContext::config_manager()->seed();
  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
  GlobalContext::config_manager()->set_seed(135);
  GlobalContext::config_manager()->set_num_parallel_workers(1);

  // Create a CLUE Dataset, with two text files
  // Note: train.json has 3 rows
  // Note: dev.json has 3 rows
  // Use default of all samples
  // They have the same keywords
  // Set shuffle to files shuffle
  std::string clue_file1 = datasets_root_path_ + "/testCLUE/afqmc/train.json";
  std::string clue_file2 = datasets_root_path_ + "/testCLUE/afqmc/dev.json";
  std::string task = "AFQMC";
  std::string usage = "train";
  std::shared_ptr<Dataset> ds = CLUE({clue_file1, clue_file2}, task, usage, 0, ShuffleMode::kFiles);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset.
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("sentence1"), row.end());
  std::vector<std::string> expected_result = {
    "蚂蚁借呗等额还款能否换成先息后本",
    "蚂蚁花呗说我违约了",
    "帮我看看本月花呗账单结清了没",
    "你有花呗吗",
    "吃饭能用花呗吗",
    "蚂蚁花呗支付金额有什么限制"
  };

  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["sentence1"];
    std::string_view sv;
    text->GetItemAt(&sv, {0});
    std::string ss(sv);
    // Compare against expected result
    EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 3 + 3 = 6 samples
  EXPECT_EQ(i, 6);

  // Manually terminate the pipeline
  iter->Stop();

  // Restore configuration
  GlobalContext::config_manager()->set_seed(original_seed);
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestCLUEDatasetException) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetException.";
  // Create a CLUE Dataset
  std::string clue_file = datasets_root_path_ + "/testCLUE/wsc/train.json";
  std::string task = "WSC";
  std::string usage = "train";
  std::string invalid_clue_file = "./NotExistFile";

  std::shared_ptr<Dataset> ds0 = CLUE({}, task, usage);
  EXPECT_EQ(ds0, nullptr);

  std::shared_ptr<Dataset> ds1 = CLUE({invalid_clue_file}, task, usage);
  EXPECT_EQ(ds1, nullptr);

  std::shared_ptr<Dataset> ds2 = CLUE({clue_file}, "invalid_task", usage);
  EXPECT_EQ(ds2, nullptr);

  std::shared_ptr<Dataset> ds3 = CLUE({clue_file}, task, "invalid_usage");
  EXPECT_EQ(ds3, nullptr);

  std::shared_ptr<Dataset> ds4 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 2, 2);
  EXPECT_EQ(ds4, nullptr);

  std::shared_ptr<Dataset> ds5 = CLUE({clue_file}, task, usage, -1, ShuffleMode::kGlobal);
  EXPECT_EQ(ds5, nullptr);

  std::shared_ptr<Dataset> ds6 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, -1);
  EXPECT_EQ(ds6, nullptr);

  std::shared_ptr<Dataset> ds7 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 0, -1);
  EXPECT_EQ(ds7, nullptr);
}
@@ -13,42 +13,54 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <fstream>
#include <iostream>
#include <memory>
#include <vector>
#include <string>

#include "utils/log_adapter.h"
#include "utils/ms_utils.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "securec.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/status.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/core/constants.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/include/samplers.h"

using namespace mindspore::dataset::api;
using mindspore::MsLogLevel::ERROR;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorShape;
using mindspore::dataset::TensorImpl;
using mindspore::dataset::DataType;
using mindspore::dataset::Status;
using mindspore::dataset::BorderType;
using mindspore::dataset::dsize_t;

class MindDataTestPipeline : public UT::DatasetOpTesting {
 protected:
};

TEST_F(MindDataTestPipeline, TestCocoDefault) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDefault.";
  // Create a Coco Dataset
  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";

  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto bbox = row["bbox"];
    auto category_id = row["category_id"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    MS_LOG(INFO) << "Tensor bbox shape: " << bbox->shape();
    MS_LOG(INFO) << "Tensor category_id shape: " << category_id->shape();
    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 6);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCocoDetection) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDetection.";
  // Create a Coco Dataset
@@ -99,55 +111,22 @@ TEST_F(MindDataTestPipeline, TestCocoDetection) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCocoStuff) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoStuff.";
TEST_F(MindDataTestPipeline, TestCocoException) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoException.";
  // Create a Coco Dataset
  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";
  std::string invalid_folder_path = "./NotExist";
  std::string invalid_annotation_file = "./NotExistFile";

  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Stuff", false, SequentialSampler(0, 6));
  EXPECT_NE(ds, nullptr);
  std::shared_ptr<Dataset> ds = Coco(invalid_folder_path, annotation_file);
  EXPECT_EQ(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);
  std::shared_ptr<Dataset> ds1 = Coco(folder_path, invalid_annotation_file);
  EXPECT_EQ(ds1, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  std::string expect_file[] = {"000000391895", "000000318219", "000000554625", "000000574769", "000000060623",
                               "000000309022"};
  std::vector<std::vector<float>> expect_segmentation_vector =
    {{10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0,
      70.0, 72.0, 73.0, 74.0, 75.0, -1.0, -1.0, -1.0, -1.0, -1.0},
     {20.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0,
      10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, -1.0},
     {40.0, 42.0, 43.0, 44.0, 45.0, 46.0, 47.0, 48.0, 49.0, 40.0, 41.0, 42.0},
     {50.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, 60.0, 61.0, 62.0, 63.0},
     {60.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0},
     {60.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0}};
  std::vector<std::vector<dsize_t>> expect_size = {{2, 10}, {2, 11}, {1, 12}, {1, 13}, {1, 14}, {2, 7}};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto segmentation = row["segmentation"];
    auto iscrowd = row["iscrowd"];
    std::shared_ptr<Tensor> expect_image;
    Tensor::CreateFromFile(folder_path + "/" + expect_file[i] + ".jpg", &expect_image);
    EXPECT_EQ(*image, *expect_image);
    std::shared_ptr<Tensor> expect_segmentation;
    Tensor::CreateFromVector(expect_segmentation_vector[i], TensorShape(expect_size[i]), &expect_segmentation);
    EXPECT_EQ(*segmentation, *expect_segmentation);
    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 6);

  // Manually terminate the pipeline
  iter->Stop();
  std::shared_ptr<Dataset> ds2 = Coco(folder_path, annotation_file, "valid_mode");
  EXPECT_EQ(ds2, nullptr);
}

TEST_F(MindDataTestPipeline, TestCocoKeypoint) {
@@ -261,13 +240,13 @@ TEST_F(MindDataTestPipeline, TestCocoPanoptic) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCocoDefault) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDetection.";
TEST_F(MindDataTestPipeline, TestCocoStuff) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoStuff.";
  // Create a Coco Dataset
  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";

  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file);
  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Stuff", false, SequentialSampler(0, 6));
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
@@ -279,14 +258,29 @@ TEST_F(MindDataTestPipeline, TestCocoDefault) {
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  std::string expect_file[] = {"000000391895", "000000318219", "000000554625", "000000574769", "000000060623",
                               "000000309022"};
  std::vector<std::vector<float>> expect_segmentation_vector =
    {{10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0,
      70.0, 72.0, 73.0, 74.0, 75.0, -1.0, -1.0, -1.0, -1.0, -1.0},
     {20.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0,
      10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, -1.0},
     {40.0, 42.0, 43.0, 44.0, 45.0, 46.0, 47.0, 48.0, 49.0, 40.0, 41.0, 42.0},
     {50.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, 60.0, 61.0, 62.0, 63.0},
     {60.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0},
     {60.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0}};
  std::vector<std::vector<dsize_t>> expect_size = {{2, 10}, {2, 11}, {1, 12}, {1, 13}, {1, 14}, {2, 7}};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto bbox = row["bbox"];
    auto category_id = row["category_id"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    MS_LOG(INFO) << "Tensor bbox shape: " << bbox->shape();
    MS_LOG(INFO) << "Tensor category_id shape: " << category_id->shape();
    auto segmentation = row["segmentation"];
    auto iscrowd = row["iscrowd"];
    std::shared_ptr<Tensor> expect_image;
    Tensor::CreateFromFile(folder_path + "/" + expect_file[i] + ".jpg", &expect_image);
    EXPECT_EQ(*image, *expect_image);
    std::shared_ptr<Tensor> expect_segmentation;
    Tensor::CreateFromVector(expect_segmentation_vector[i], TensorShape(expect_size[i]), &expect_segmentation);
    EXPECT_EQ(*segmentation, *expect_segmentation);
    iter->GetNextRow(&row);
    i++;
  }
@@ -296,21 +290,3 @@ TEST_F(MindDataTestPipeline, TestCocoDefault) {
  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCocoException) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDetection.";
  // Create a Coco Dataset
  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";
  std::string invalid_folder_path = "./NotExist";
  std::string invalid_annotation_file = "./NotExistFile";

  std::shared_ptr<Dataset> ds = Coco(invalid_folder_path, annotation_file);
  EXPECT_EQ(ds, nullptr);

  std::shared_ptr<Dataset> ds1 = Coco(folder_path, invalid_annotation_file);
  EXPECT_EQ(ds1, nullptr);

  std::shared_ptr<Dataset> ds2 = Coco(folder_path, annotation_file, "valid_mode");
  EXPECT_EQ(ds2, nullptr);
}
@@ -13,41 +13,15 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <fstream>
#include <iostream>
#include <memory>
#include <vector>
#include <string>

#include "utils/log_adapter.h"
#include "utils/ms_utils.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "./securec.h"

#include "minddata/dataset/core/client.h"
#include "minddata/dataset/core/config_manager.h"
#include "minddata/dataset/core/constants.h"
#include "minddata/dataset/core/global_context.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/include/samplers.h"
#include "minddata/dataset/include/status.h"
#include "minddata/dataset/include/transforms.h"

using namespace mindspore::dataset;
using namespace mindspore::dataset::api;
using mindspore::LogStream;
using mindspore::dataset::DataType;
using mindspore::dataset::ShuffleMode;
using mindspore::dataset::Status;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorImpl;
using mindspore::dataset::TensorShape;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::MsLogLevel::ERROR;

class MindDataTestPipeline : public UT::DatasetOpTesting {
 protected:
@@ -109,6 +83,87 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetBasic) {
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail1.";

  // Attempt to create a TextFile Dataset
  // with invalid num_samples=-1
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1}, -1);

  // Expect failure: Number of samples cannot be negative
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail2) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail2.";

  // Attempt to create a TextFile Dataset
  // with wrongful empty dataset_files input
  std::shared_ptr<Dataset> ds = TextFile({});

  // Expect failure: dataset_files is not specified
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail3) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail3.";

  // Attempt to create a TextFile Dataset
  // with non-existent dataset_files input
  std::shared_ptr<Dataset> ds = TextFile({"notexist.txt"}, 0, ShuffleMode::kFalse);

  // Expect failure: specified dataset_files does not exist
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail4) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail4.";

  // Attempt to create a TextFile Dataset
  // with empty string dataset_files input
  std::shared_ptr<Dataset> ds = TextFile({""}, 0, ShuffleMode::kFiles);

  // Expect failure: specified dataset_files does not exist
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail5) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail5.";

  // Attempt to create a TextFile Dataset
  // with invalid num_shards=0 value
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 1, ShuffleMode::kFalse, 0);

  // Expect failure: Number of shards cannot be <=0
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail6) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail6.";

  // Attempt to create a TextFile Dataset
  // with invalid shard_id=-1 value
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kFiles, -1);

  // Expect failure: shard_id cannot be negative
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail7) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail7.";

  // Attempt to create a TextFile Dataset
  // with invalid shard_id=2 and num_shards=2 combination
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kGlobal, 2, 2);

  // Expect failure: Cannot have shard_id >= num_shards
  EXPECT_EQ(ds, nullptr);
}
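The seven TextFile failure cases likewise outline the parameter order after the file list; a hedged summary, with names that are editorial rather than taken from the header:

// Inferred from Fail1-Fail7 above; parameter names are illustrative:
//   TextFile(dataset_files, num_samples, shuffle, num_shards, shard_id)
// Fail7 rejects shard_id == num_shards; under the same inference, a valid
// 2-shard read of shard 0 would be:
std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kGlobal, 2, 0);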

TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFalse1.";
  // Test TextFile Dataset with two text files and no shuffle, num_parallel_workers=1

@@ -224,6 +279,125 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse4Shard) {
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFiles1.";
  // Test TextFile Dataset with files shuffle, num_parallel_workers=1

  // Set configuration
  uint32_t original_seed = GlobalContext::config_manager()->seed();
  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
  GlobalContext::config_manager()->set_seed(135);
  GlobalContext::config_manager()->set_num_parallel_workers(1);

  // Create a TextFile Dataset, with two text files
  // Note: 1.txt has 3 rows
  // Note: 2.txt has 2 rows
  // Use default of all samples
  // Set shuffle to files shuffle
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kFiles);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset.
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("text"), row.end());
  std::vector<std::string> expected_result = {
    "This is a text file.", "Be happy every day.", "Good luck to everyone.", "Another file.", "End of file.",
  };

  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["text"];
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    std::string_view sv;
    text->GetItemAt(&sv, {0});
    std::string ss(sv);
    MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50);
    // Compare against expected result
    EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 2 + 3 = 5 samples
  EXPECT_EQ(i, 5);

  // Manually terminate the pipeline
  iter->Stop();

  // Restore configuration
  GlobalContext::config_manager()->set_seed(original_seed);
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles4) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFiles4.";
  // Test TextFile Dataset with files shuffle, num_parallel_workers=4

  // Set configuration
  uint32_t original_seed = GlobalContext::config_manager()->seed();
  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
  GlobalContext::config_manager()->set_seed(135);
  GlobalContext::config_manager()->set_num_parallel_workers(4);

  // Create a TextFile Dataset, with two text files
  // Note: 1.txt has 3 rows
  // Note: 2.txt has 2 rows
  // Use default of all samples
  // Set shuffle to files shuffle
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kFiles);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset.
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("text"), row.end());
  std::vector<std::string> expected_result = {"This is a text file.", "Another file.", "Be happy every day.",
                                              "End of file.", "Good luck to everyone."};

  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["text"];
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    std::string_view sv;
    text->GetItemAt(&sv, {0});
    std::string ss(sv);
    MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50);
    // Compare against expected result
    EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 2 + 3 = 5 samples
  EXPECT_EQ(i, 5);

  // Manually terminate the pipeline
  iter->Stop();

  // Restore configuration
  GlobalContext::config_manager()->set_seed(original_seed);
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1A) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleGlobal1A.";
  // Test TextFile Dataset with 1 text file, global shuffle, num_parallel_workers=1
@@ -394,203 +568,3 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal4) {
  GlobalContext::config_manager()->set_seed(original_seed);
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFiles1.";
  // Test TextFile Dataset with files shuffle, num_parallel_workers=1

  // Set configuration
  uint32_t original_seed = GlobalContext::config_manager()->seed();
  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
  GlobalContext::config_manager()->set_seed(135);
  GlobalContext::config_manager()->set_num_parallel_workers(1);

  // Create a TextFile Dataset, with two text files
  // Note: 1.txt has 3 rows
  // Note: 2.txt has 2 rows
  // Use default of all samples
  // Set shuffle to files shuffle
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kFiles);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset.
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("text"), row.end());
  std::vector<std::string> expected_result = {
    "This is a text file.", "Be happy every day.", "Good luck to everyone.", "Another file.", "End of file.",
  };

  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["text"];
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    std::string_view sv;
    text->GetItemAt(&sv, {0});
    std::string ss(sv);
    MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50);
    // Compare against expected result
    EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 2 + 3 = 5 samples
  EXPECT_EQ(i, 5);

  // Manually terminate the pipeline
  iter->Stop();

  // Restore configuration
  GlobalContext::config_manager()->set_seed(original_seed);
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles4) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFiles4.";
  // Test TextFile Dataset with files shuffle, num_parallel_workers=4

  // Set configuration
  uint32_t original_seed = GlobalContext::config_manager()->seed();
  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
  GlobalContext::config_manager()->set_seed(135);
  GlobalContext::config_manager()->set_num_parallel_workers(4);

  // Create a TextFile Dataset, with two text files
  // Note: 1.txt has 3 rows
  // Note: 2.txt has 2 rows
  // Use default of all samples
  // Set shuffle to files shuffle
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kFiles);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset.
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("text"), row.end());
  std::vector<std::string> expected_result = {"This is a text file.", "Another file.", "Be happy every day.",
                                              "End of file.", "Good luck to everyone."};

  uint64_t i = 0;
  while (row.size() != 0) {
    auto text = row["text"];
    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
    std::string_view sv;
    text->GetItemAt(&sv, {0});
    std::string ss(sv);
    MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50);
    // Compare against expected result
    EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
    i++;
    iter->GetNextRow(&row);
  }

  // Expect 2 + 3 = 5 samples
  EXPECT_EQ(i, 5);

  // Manually terminate the pipeline
  iter->Stop();

  // Restore configuration
  GlobalContext::config_manager()->set_seed(original_seed);
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail1.";

  // Attempt to create a TextFile Dataset
  // with invalid num_samples=-1
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1}, -1);

  // Expect failure: Number of samples cannot be negative
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail2) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail2.";

  // Attempt to create a TextFile Dataset
  // with wrongful empty dataset_files input
  std::shared_ptr<Dataset> ds = TextFile({});

  // Expect failure: dataset_files is not specified
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail3) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail3.";

  // Attempt to create a TextFile Dataset
  // with non-existent dataset_files input
  std::shared_ptr<Dataset> ds = TextFile({"notexist.txt"}, 0, ShuffleMode::kFalse);

  // Expect failure: specified dataset_files does not exist
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail4) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail4.";

  // Attempt to create a TextFile Dataset
  // with empty string dataset_files input
  std::shared_ptr<Dataset> ds = TextFile({""}, 0, ShuffleMode::kFiles);

  // Expect failure: specified dataset_files does not exist
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail5) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail5.";

  // Attempt to create a TextFile Dataset
  // with invalid num_shards=0 value
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 1, ShuffleMode::kFalse, 0);

  // Expect failure: Number of shards cannot be <=0
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail6) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail6.";

  // Attempt to create a TextFile Dataset
  // with invalid shard_id=-1 value
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kFiles, -1);

  // Expect failure: shard_id cannot be negative
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTextFileDatasetFail7) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail7.";

  // Attempt to create a TextFile Dataset
  // with invalid shard_id=2 and num_shards=2 combination
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kGlobal, 2, 2);

  // Expect failure: Cannot have shard_id >= num_shards
  EXPECT_EQ(ds, nullptr);
}
@@ -13,42 +13,52 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <fstream>
#include <iostream>
#include <memory>
#include <vector>
#include <string>

#include "utils/log_adapter.h"
#include "utils/ms_utils.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "securec.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/status.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/core/constants.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/include/samplers.h"

using namespace mindspore::dataset::api;
using mindspore::MsLogLevel::ERROR;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorShape;
using mindspore::dataset::TensorImpl;
using mindspore::dataset::DataType;
using mindspore::dataset::Status;
using mindspore::dataset::BorderType;
using mindspore::dataset::dsize_t;

class MindDataTestPipeline : public UT::DatasetOpTesting {
 protected:
};

TEST_F(MindDataTestPipeline, TestIteratorEmptyColumn) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorEmptyColumn.";
  // Create a Cifar10 Dataset
  std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
  std::shared_ptr<Dataset> ds = Cifar10(folder_path, RandomSampler(false, 5));
  EXPECT_NE(ds, nullptr);

  // Create a Rename operation on ds
  ds = ds->Rename({"image", "label"}, {"col1", "col2"});
  EXPECT_NE(ds, nullptr);

  // No columns are specified, use all columns
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::vector<std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);
  TensorShape expect0({32, 32, 3});
  TensorShape expect1({});

  uint64_t i = 0;
  while (row.size() != 0) {
    MS_LOG(INFO) << "row[0]:" << row[0]->shape() << ", row[1]:" << row[1]->shape();
    EXPECT_EQ(expect0, row[0]->shape());
    EXPECT_EQ(expect1, row[1]->shape());
    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 5);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestIteratorOneColumn) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorOneColumn.";
@ -89,6 +99,46 @@ TEST_F(MindDataTestPipeline, TestIteratorOneColumn) {
|
|||
iter->Stop();
|
||||
}
|
||||
|
||||
TEST_F(MindDataTestPipeline, TestIteratorReOrder) {
|
||||
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorReOrder.";
|
||||
// Create a Cifar10 Dataset
|
||||
std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
|
||||
std::shared_ptr<Dataset> ds = Cifar10(folder_path, SequentialSampler(false, 4));
|
||||
EXPECT_NE(ds, nullptr);
|
||||
|
||||
// Create a Take operation on ds
|
||||
ds = ds->Take(2);
|
||||
EXPECT_NE(ds, nullptr);
|
||||
|
||||
// Create an iterator over the result of the above dataset
|
||||
// Reorder "image" and "label" column
|
||||
std::vector<std::string> columns = {"label", "image"};
|
||||
std::shared_ptr<Iterator> iter = ds->CreateIterator(columns);
|
||||
EXPECT_NE(iter, nullptr);
|
||||
|
||||
// Iterate the dataset and get each row
|
||||
std::vector<std::shared_ptr<Tensor>> row;
|
||||
iter->GetNextRow(&row);
|
||||
TensorShape expect0({32, 32, 3});
|
||||
TensorShape expect1({});
|
||||
|
||||
// Check if we will catch "label" before "image" in row
|
||||
std::vector<std::string> expect = {"label", "image"};
|
||||
uint64_t i = 0;
|
||||
while (row.size() != 0) {
|
||||
MS_LOG(INFO) << "row[0]:" << row[0]->shape() << ", row[1]:" << row[1]->shape();
|
||||
EXPECT_EQ(expect1, row[0]->shape());
|
||||
EXPECT_EQ(expect0, row[1]->shape());
|
||||
iter->GetNextRow(&row);
|
||||
i++;
|
||||
}
|
||||
|
||||
EXPECT_EQ(i, 2);
|
||||
|
||||
// Manually terminate the pipeline
|
||||
iter->Stop();
|
||||
}
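
// Editor's sketch (not part of this commit): the contrast between the two tests above.
// CreateIterator() with no arguments yields rows in the dataset's native column order,
// while CreateIterator(columns) yields them in the requested order; names here are
// illustrative only.
//
// std::shared_ptr<Iterator> it_all = ds->CreateIterator();                   // row[0]: "image", row[1]: "label"
// std::shared_ptr<Iterator> it_sel = ds->CreateIterator({"label", "image"}); // row[0]: "label", row[1]: "image"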

TEST_F(MindDataTestPipeline, TestIteratorTwoColumns) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorTwoColumns.";
// Create a VOC Dataset
@@ -133,82 +183,6 @@ TEST_F(MindDataTestPipeline, TestIteratorTwoColumns) {
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestIteratorEmptyColumn) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorEmptyColumn.";
// Create a Cifar10 Dataset
std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
std::shared_ptr<Dataset> ds = Cifar10(folder_path, RandomSampler(false, 5));
EXPECT_NE(ds, nullptr);

// Create a Rename operation on ds
ds = ds->Rename({"image", "label"}, {"col1", "col2"});
EXPECT_NE(ds, nullptr);

// No columns are specified, use all columns
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::vector<std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);
TensorShape expect0({32, 32, 3});
TensorShape expect1({});

uint64_t i = 0;
while (row.size() != 0) {
MS_LOG(INFO) << "row[0]:" << row[0]->shape() << ", row[1]:" << row[1]->shape();
EXPECT_EQ(expect0, row[0]->shape());
EXPECT_EQ(expect1, row[1]->shape());
iter->GetNextRow(&row);
i++;
}

EXPECT_EQ(i, 5);

// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestIteratorReOrder) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorReOrder.";
// Create a Cifar10 Dataset
std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
std::shared_ptr<Dataset> ds = Cifar10(folder_path, SequentialSampler(false, 4));
EXPECT_NE(ds, nullptr);

// Create a Take operation on ds
ds = ds->Take(2);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// Reorder the "image" and "label" columns
std::vector<std::string> columns = {"label", "image"};
std::shared_ptr<Iterator> iter = ds->CreateIterator(columns);
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::vector<std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);
TensorShape expect0({32, 32, 3});
TensorShape expect1({});

// Check that "label" comes before "image" in each row
std::vector<std::string> expect = {"label", "image"};
uint64_t i = 0;
while (row.size() != 0) {
MS_LOG(INFO) << "row[0]:" << row[0]->shape() << ", row[1]:" << row[1]->shape();
EXPECT_EQ(expect1, row[0]->shape());
EXPECT_EQ(expect0, row[1]->shape());
iter->GetNextRow(&row);
i++;
}

EXPECT_EQ(i, 2);

// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestIteratorWrongColumn) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorWrongColumn.";
// Create a Mnist Dataset

@@ -13,37 +13,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <fstream>
#include <iostream>
#include <memory>
#include <vector>
#include <string>

#include "utils/log_adapter.h"
#include "utils/ms_utils.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "securec.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/status.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/core/constants.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/include/samplers.h"

using namespace mindspore::dataset::api;
using mindspore::MsLogLevel::ERROR;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorShape;
using mindspore::dataset::TensorImpl;
using mindspore::dataset::DataType;
using mindspore::dataset::Status;
using mindspore::dataset::BorderType;
using mindspore::dataset::dsize_t;

class MindDataTestPipeline : public UT::DatasetOpTesting {
protected:
@@ -90,28 +65,77 @@ TEST_F(MindDataTestPipeline, TestBatchAndRepeat) {
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestTensorOpsAndMap) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTensorOpsAndMap.";

// Create a Mnist Dataset
std::string folder_path = datasets_root_path_ + "/testMnistData/";
std::shared_ptr<Dataset> ds = Mnist(folder_path, RandomSampler(false, 20));
TEST_F(MindDataTestPipeline, TestConcatFail1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatFail1.";
// This case is expected to fail because the input column names of the concatenated datasets are not the same

// Create an ImageFolder Dataset
// Column names: {"image", "label"}
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);
std::shared_ptr<Dataset> ds2 = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds2, nullptr);

// Create a Repeat operation on ds
int32_t repeat_num = 2;
ds = ds->Repeat(repeat_num);
// Create a Rename operation on ds2
ds2 = ds2->Rename({"image", "label"}, {"col1", "col2"});
EXPECT_NE(ds2, nullptr);

// Create objects for the tensor ops
std::shared_ptr<TensorOperation> resize_op = vision::Resize({30, 30});
EXPECT_NE(resize_op, nullptr);
// Create a Concat operation on the ds
// Column names of the datasets to concat do not match
ds = ds->Concat({ds2});
EXPECT_NE(ds, nullptr);

std::shared_ptr<TensorOperation> center_crop_op = vision::CenterCrop({16, 16});
EXPECT_NE(center_crop_op, nullptr);
// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create a Map operation on ds
ds = ds->Map({resize_op, center_crop_op});
// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_EQ(iter, nullptr);
}

TEST_F(MindDataTestPipeline, TestConcatFail2) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatFail2.";
// This case is expected to fail because the input dataset is empty.

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Concat operation on the ds
// Input dataset to concat is empty
ds = ds->Concat({});
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestConcatSuccess) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatSuccess.";

// Create an ImageFolder Dataset
// Column names: {"image", "label"}
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Cifar10 Dataset
// Column names: {"image", "label"}
folder_path = datasets_root_path_ + "/testCifar10Data/";
std::shared_ptr<Dataset> ds2 = Cifar10(folder_path, RandomSampler(false, 9));
EXPECT_NE(ds2, nullptr);

// Create a Project operation on ds
ds = ds->Project({"image"});
EXPECT_NE(ds, nullptr);
ds2 = ds2->Project({"image"});
EXPECT_NE(ds2, nullptr);

// Create a Concat operation on the ds
ds = ds->Concat({ds2});
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
@@ -127,7 +151,6 @@ TEST_F(MindDataTestPipeline, TestTensorOpsAndMap) {
// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);

uint64_t i = 0;
while (row.size() != 0) {
i++;
@@ -136,8 +159,59 @@ TEST_F(MindDataTestPipeline, TestTensorOpsAndMap) {
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 40);
EXPECT_EQ(i, 19);
// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestConcatSuccess2) {
// Test "+" operator to concat two datasets
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatSuccess2.";

// Create an ImageFolder Dataset
// Column names: {"image", "label"}
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Cifar10 Dataset
// Column names: {"image", "label"}
folder_path = datasets_root_path_ + "/testCifar10Data/";
std::shared_ptr<Dataset> ds2 = Cifar10(folder_path, RandomSampler(false, 9));
EXPECT_NE(ds2, nullptr);

// Create a Project operation on ds
ds = ds->Project({"image"});
EXPECT_NE(ds, nullptr);
ds2 = ds2->Project({"image"});
EXPECT_NE(ds2, nullptr);

// Create a Concat operation on the ds
ds = ds + ds2;
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);
uint64_t i = 0;
while (row.size() != 0) {
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 19);
// Manually terminate the pipeline
iter->Stop();
}
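
// Editor's note: a hedged sketch of why both Concat tests above expect 19 rows. Assuming
// each sampler delivers exactly its requested count, ImageFolder contributes 10 rows and
// Cifar10 contributes 9, and Concat (or its operator+ sugar) simply appends the second
// dataset after the first:
//
// ds = ds->Concat({ds2});   // 10 + 9 = 19 rows
// ds = ds + ds2;            // equivalent shorthand for the same concatenation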

@@ -183,6 +257,180 @@ TEST_F(MindDataTestPipeline, TestImageFolderBatchAndRepeat) {
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestProjectMap) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestProjectMap.";

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
int32_t repeat_num = 2;
ds = ds->Repeat(repeat_num);
EXPECT_NE(ds, nullptr);

// Create objects for the tensor ops
std::shared_ptr<TensorOperation> random_vertical_flip_op = vision::RandomVerticalFlip(0.5);
EXPECT_NE(random_vertical_flip_op, nullptr);

// Create a Map operation on ds
ds = ds->Map({random_vertical_flip_op}, {}, {}, {"image", "label"});
EXPECT_NE(ds, nullptr);

// Create a Project operation on ds
std::vector<std::string> column_project = {"image"};
ds = ds->Project(column_project);
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);

uint64_t i = 0;
while (row.size() != 0) {
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 20);

// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestProjectMapAutoInjection) {
MS_LOG(INFO) << "Doing MindDataTestPipeline.TestProjectMapAutoInjection";

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
int32_t repeat_num = 2;
ds = ds->Repeat(repeat_num);
EXPECT_NE(ds, nullptr);

// Create objects for the tensor ops
std::shared_ptr<TensorOperation> resize_op = vision::Resize({30, 30});
EXPECT_NE(resize_op, nullptr);

// Create a Map operation on ds
// {"image"} is the project columns. This will trigger auto injection of ProjectOp after MapOp.
ds = ds->Map({resize_op}, {}, {}, {"image"});
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);

// 'label' is dropped during the project op
EXPECT_EQ(row.find("label"), row.end());
// 'image' column should still exist
EXPECT_NE(row.find("image"), row.end());

uint64_t i = 0;
while (row.size() != 0) {
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
EXPECT_EQ(image->shape()[0], 30);
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 20);

// Manually terminate the pipeline
iter->Stop();
}
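
// Editor's sketch of the Map overload used above (parameter names are illustrative, not
// the header's): Map(operations, input_columns, output_columns, project_columns). Passing
// {"image"} as the project columns behaves as if a Project were appended after the Map:
//
// ds = ds->Map({resize_op}, {}, {}, {"image"});
// // is effectively equivalent to
// ds = ds->Map({resize_op});
// ds = ds->Project({"image"});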

TEST_F(MindDataTestPipeline, TestRenameFail) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRenameFail.";
// We expect this test to fail because the input and output column lists given to Rename are not the same size

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
int32_t repeat_num = 2;
ds = ds->Repeat(repeat_num);
EXPECT_NE(ds, nullptr);

// Create a Rename operation on ds
ds = ds->Rename({"image", "label"}, {"col2"});
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestRenameSuccess) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRenameSuccess.";

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
int32_t repeat_num = 2;
ds = ds->Repeat(repeat_num);
EXPECT_NE(ds, nullptr);

// Create a Rename operation on ds
ds = ds->Rename({"image", "label"}, {"col1", "col2"});
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);

uint64_t i = 0;
EXPECT_NE(row.find("col1"), row.end());
EXPECT_NE(row.find("col2"), row.end());
EXPECT_EQ(row.find("image"), row.end());
EXPECT_EQ(row.find("label"), row.end());

while (row.size() != 0) {
i++;
auto image = row["col1"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 20);

// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestShuffleDataset) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestShuffleDataset.";

@@ -319,6 +567,21 @@ TEST_F(MindDataTestPipeline, TestTakeDatasetDefault) {
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestTakeDatasetError1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTakeDatasetError1.";

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Take operation on ds with invalid count input
int32_t count = -5;
ds = ds->Take(count);
// Expect nullptr for invalid input take_count
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTakeDatasetNormal) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTakeDatasetNormal.";

@@ -356,27 +619,12 @@ TEST_F(MindDataTestPipeline, TestTakeDatasetNormal) {
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestTakeDatasetError1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTakeDatasetError1.";
TEST_F(MindDataTestPipeline, TestTensorOpsAndMap) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTensorOpsAndMap.";

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Take operation on ds with invalid count input
int32_t count = -5;
ds = ds->Take(count);
// Expect nullptr for invalid input take_count
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestProjectMap) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestProjectMap.";

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
// Create a Mnist Dataset
std::string folder_path = datasets_root_path_ + "/testMnistData/";
std::shared_ptr<Dataset> ds = Mnist(folder_path, RandomSampler(false, 20));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
@@ -385,16 +633,14 @@ TEST_F(MindDataTestPipeline, TestProjectMap) {
EXPECT_NE(ds, nullptr);

// Create objects for the tensor ops
std::shared_ptr<TensorOperation> random_vertical_flip_op = vision::RandomVerticalFlip(0.5);
EXPECT_NE(random_vertical_flip_op, nullptr);
std::shared_ptr<TensorOperation> resize_op = vision::Resize({30, 30});
EXPECT_NE(resize_op, nullptr);

std::shared_ptr<TensorOperation> center_crop_op = vision::CenterCrop({16, 16});
EXPECT_NE(center_crop_op, nullptr);

// Create a Map operation on ds
ds = ds->Map({random_vertical_flip_op}, {}, {}, {"image", "label"});
EXPECT_NE(ds, nullptr);

// Create a Project operation on ds
std::vector<std::string> column_project = {"image"};
ds = ds->Project(column_project);
ds = ds->Map({resize_op, center_crop_op});
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
@@ -419,62 +665,55 @@ TEST_F(MindDataTestPipeline, TestProjectMap) {
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 20);
EXPECT_EQ(i, 40);

// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestProjectMapAutoInjection) {
MS_LOG(INFO) << "Doing MindDataTestPipeline.TestProjectMapAutoInjection";
TEST_F(MindDataTestPipeline, TestZipFail) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestZipFail.";
// We expect this test to fail because both datasets being zipped have "image" and "label" columns,
// and zip doesn't accept datasets with the same column names

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
int32_t repeat_num = 2;
ds = ds->Repeat(repeat_num);
// Create an ImageFolder Dataset
std::shared_ptr<Dataset> ds1 = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds1, nullptr);

// Create a Zip operation on the datasets
ds = Zip({ds, ds1});
EXPECT_NE(ds, nullptr);

// Create objects for the tensor ops
std::shared_ptr<TensorOperation> resize_op = vision::Resize({30, 30});
EXPECT_NE(resize_op, nullptr);

// Create a Map operation on ds
// {"image"} is the project columns. This will trigger auto injection of ProjectOp after MapOp.
ds = ds->Map({resize_op}, {}, {}, {"image"});
// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);
EXPECT_EQ(iter, nullptr);
}
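
// Editor's note: Zip's column-name validation appears to happen when the execution tree
// is built, which is why the zipped dataset handle above is non-null while
// CreateIterator() returns nullptr once the duplicate "image"/"label" columns are seen.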

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);
TEST_F(MindDataTestPipeline, TestZipFail2) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestZipFail2.";
// This case is expected to fail because the input dataset is empty.

// 'label' is dropped during the project op
EXPECT_EQ(row.find("label"), row.end());
// 'image' column should still exist
EXPECT_NE(row.find("image"), row.end());
// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

uint64_t i = 0;
while (row.size() != 0) {
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
EXPECT_EQ(image->shape()[0], 30);
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 20);

// Manually terminate the pipeline
iter->Stop();
// Create a Zip operation on the datasets
// Input dataset to zip is empty
ds = Zip({});
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestZipSuccess) {
@@ -600,266 +839,3 @@ TEST_F(MindDataTestPipeline, TestZipSuccess2) {
// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestZipFail) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestZipFail.";
// We expect this test to fail because both datasets being zipped have "image" and "label" columns,
// and zip doesn't accept datasets with the same column names

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create an ImageFolder Dataset
std::shared_ptr<Dataset> ds1 = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds1, nullptr);

// Create a Zip operation on the datasets
ds = Zip({ds, ds1});
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_EQ(iter, nullptr);
}

TEST_F(MindDataTestPipeline, TestZipFail2) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestZipFail2.";
// This case is expected to fail because the input dataset is empty.

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Zip operation on the datasets
// Input dataset to zip is empty
ds = Zip({});
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestRenameSuccess) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRenameSuccess.";

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
int32_t repeat_num = 2;
ds = ds->Repeat(repeat_num);
EXPECT_NE(ds, nullptr);

// Create a Rename operation on ds
ds = ds->Rename({"image", "label"}, {"col1", "col2"});
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);

uint64_t i = 0;
EXPECT_NE(row.find("col1"), row.end());
EXPECT_NE(row.find("col2"), row.end());
EXPECT_EQ(row.find("image"), row.end());
EXPECT_EQ(row.find("label"), row.end());

while (row.size() != 0) {
i++;
auto image = row["col1"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 20);

// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestRenameFail) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRenameFail.";
// We expect this test to fail because the input and output column lists given to Rename are not the same size

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
int32_t repeat_num = 2;
ds = ds->Repeat(repeat_num);
EXPECT_NE(ds, nullptr);

// Create a Rename operation on ds
ds = ds->Rename({"image", "label"}, {"col2"});
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestConcatSuccess) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatSuccess.";

// Create an ImageFolder Dataset
// Column names: {"image", "label"}
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Cifar10 Dataset
// Column names: {"image", "label"}
folder_path = datasets_root_path_ + "/testCifar10Data/";
std::shared_ptr<Dataset> ds2 = Cifar10(folder_path, RandomSampler(false, 9));
EXPECT_NE(ds2, nullptr);

// Create a Project operation on ds
ds = ds->Project({"image"});
EXPECT_NE(ds, nullptr);
ds2 = ds2->Project({"image"});
EXPECT_NE(ds2, nullptr);

// Create a Concat operation on the ds
ds = ds->Concat({ds2});
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);
uint64_t i = 0;
while (row.size() != 0) {
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 19);
// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestConcatSuccess2) {
// Test "+" operator to concat two datasets
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatSuccess2.";

// Create an ImageFolder Dataset
// Column names: {"image", "label"}
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Cifar10 Dataset
// Column names: {"image", "label"}
folder_path = datasets_root_path_ + "/testCifar10Data/";
std::shared_ptr<Dataset> ds2 = Cifar10(folder_path, RandomSampler(false, 9));
EXPECT_NE(ds2, nullptr);

// Create a Project operation on ds
ds = ds->Project({"image"});
EXPECT_NE(ds, nullptr);
ds2 = ds2->Project({"image"});
EXPECT_NE(ds2, nullptr);

// Create a Concat operation on the ds
ds = ds + ds2;
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);
uint64_t i = 0;
while (row.size() != 0) {
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 19);
// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestConcatFail1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatFail1.";
// This case is expected to fail because the input column names of the concatenated datasets are not the same

// Create an ImageFolder Dataset
// Column names: {"image", "label"}
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);
std::shared_ptr<Dataset> ds2 = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds2, nullptr);

// Create a Rename operation on ds2
ds2 = ds2->Rename({"image", "label"}, {"col1", "col2"});
EXPECT_NE(ds2, nullptr);

// Create a Concat operation on the ds
// Column names of the datasets to concat do not match
ds = ds->Concat({ds2});
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_EQ(iter, nullptr);
}

TEST_F(MindDataTestPipeline, TestConcatFail2) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatFail2.";
// This case is expected to fail because the input dataset is empty.

// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Concat operation on the ds
// Input dataset to concat is empty
ds = ds->Concat({});
EXPECT_EQ(ds, nullptr);
}

@@ -13,43 +13,126 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <fstream>
#include <iostream>
#include <memory>
#include <vector>
#include <string>

#include "utils/log_adapter.h"
#include "utils/ms_utils.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "securec.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/status.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/core/constants.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/include/samplers.h"
#include "minddata/dataset/engine/datasetops/source/voc_op.h"
#include "minddata/dataset/include/datasets.h"

using namespace mindspore::dataset::api;
using mindspore::MsLogLevel::ERROR;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorShape;
using mindspore::dataset::TensorImpl;
using mindspore::dataset::DataType;
using mindspore::dataset::Status;
using mindspore::dataset::BorderType;
using mindspore::dataset::dsize_t;

class MindDataTestPipeline : public UT::DatasetOpTesting {
protected:
};

TEST_F(MindDataTestPipeline, TestVOCClassIndex) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCClassIndex.";

// Create a VOC Dataset
std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
std::map<std::string, int32_t> class_index;
class_index["car"] = 0;
class_index["cat"] = 1;
class_index["train"] = 9;

std::shared_ptr<Dataset> ds = VOC(folder_path, "Detection", "train", class_index, false, SequentialSampler(0, 6));
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);

// Check if VOCOp read the correct labels
// When we provide class_index, the labels of ["car", "cat", "train"] become [0, 1, 9]
std::shared_ptr<Tensor> expect_label;
Tensor::CreateFromMemory(TensorShape({1, 1}), DataType(DataType::DE_UINT32), nullptr, &expect_label);

uint32_t expect[] = {9, 9, 9, 1, 1, 0};
uint64_t i = 0;
while (row.size() != 0) {
auto image = row["image"];
auto label = row["label"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
MS_LOG(INFO) << "Tensor label shape: " << label->shape();
expect_label->SetItemAt({0, 0}, expect[i]);
EXPECT_EQ(*label, *expect_label);

iter->GetNextRow(&row);
i++;
}

EXPECT_EQ(i, 6);

// Manually terminate the pipeline
iter->Stop();
}
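
// Editor's note: a sketch of the class_index remapping checked above, assuming the six
// test annotations are read in sequential order. With the map {"car": 0, "cat": 1,
// "train": 9}, a "train" box yields label 9, "cat" yields 1, and "car" yields 0, which
// is why the expected label sequence is {9, 9, 9, 1, 1, 0}.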

TEST_F(MindDataTestPipeline, TestVOCDetection) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCDetection.";

// Create a VOC Dataset
std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
std::shared_ptr<Dataset> ds = VOC(folder_path, "Detection", "train", {}, false, SequentialSampler(0, 4));
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);

// Check if VOCOp read correct images/labels
std::string expect_file[] = {"15", "32", "33", "39"};
uint32_t expect_num[] = {5, 5, 4, 3};
uint64_t i = 0;
while (row.size() != 0) {
auto image = row["image"];
auto label = row["label"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
MS_LOG(INFO) << "Tensor label shape: " << label->shape();

std::shared_ptr<Tensor> expect_image;
Tensor::CreateFromFile(folder_path + "/JPEGImages/" + expect_file[i] + ".jpg", &expect_image);
EXPECT_EQ(*image, *expect_image);

std::shared_ptr<Tensor> expect_label;
Tensor::CreateFromMemory(TensorShape({1, 1}), DataType(DataType::DE_UINT32), nullptr, &expect_label);
expect_label->SetItemAt({0, 0}, expect_num[i]);
EXPECT_EQ(*label, *expect_label);

iter->GetNextRow(&row);
i++;
}

EXPECT_EQ(i, 4);

// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestVOCInvalidTaskOrMode) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCInvalidTaskOrMode.";

// Create a VOC Dataset
std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
std::shared_ptr<Dataset> ds_1 = VOC(folder_path, "Classification", "train", {}, false, SequentialSampler(0, 3));
// Expect nullptr for invalid task
EXPECT_EQ(ds_1, nullptr);

std::shared_ptr<Dataset> ds_2 = VOC(folder_path, "Segmentation", "validation", {}, false, RandomSampler(false, 4));
// Expect nullptr for invalid mode
EXPECT_EQ(ds_2, nullptr);
}

TEST_F(MindDataTestPipeline, TestVOCSegmentation) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCSegmentation.";

@@ -112,110 +195,3 @@ TEST_F(MindDataTestPipeline, TestVOCSegmentationError1) {
// Expect nullptr for segmentation task with class_index
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestVOCInvalidTaskOrMode) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCInvalidTaskOrMode.";

// Create a VOC Dataset
std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
std::shared_ptr<Dataset> ds_1 = VOC(folder_path, "Classification", "train", {}, false, SequentialSampler(0, 3));
// Expect nullptr for invalid task
EXPECT_EQ(ds_1, nullptr);

std::shared_ptr<Dataset> ds_2 = VOC(folder_path, "Segmentation", "validation", {}, false, RandomSampler(false, 4));
// Expect nullptr for invalid mode
EXPECT_EQ(ds_2, nullptr);
}

TEST_F(MindDataTestPipeline, TestVOCDetection) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCDetection.";

// Create a VOC Dataset
std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
std::shared_ptr<Dataset> ds = VOC(folder_path, "Detection", "train", {}, false, SequentialSampler(0, 4));
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);

// Check if VOCOp read correct images/labels
std::string expect_file[] = {"15", "32", "33", "39"};
uint32_t expect_num[] = {5, 5, 4, 3};
uint64_t i = 0;
while (row.size() != 0) {
auto image = row["image"];
auto label = row["label"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
MS_LOG(INFO) << "Tensor label shape: " << label->shape();

std::shared_ptr<Tensor> expect_image;
Tensor::CreateFromFile(folder_path + "/JPEGImages/" + expect_file[i] + ".jpg", &expect_image);
EXPECT_EQ(*image, *expect_image);

std::shared_ptr<Tensor> expect_label;
Tensor::CreateFromMemory(TensorShape({1, 1}), DataType(DataType::DE_UINT32), nullptr, &expect_label);
expect_label->SetItemAt({0, 0}, expect_num[i]);
EXPECT_EQ(*label, *expect_label);

iter->GetNextRow(&row);
i++;
}

EXPECT_EQ(i, 4);

// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestVOCClassIndex) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCClassIndex.";

// Create a VOC Dataset
std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
std::map<std::string, int32_t> class_index;
class_index["car"] = 0;
class_index["cat"] = 1;
class_index["train"] = 9;

std::shared_ptr<Dataset> ds = VOC(folder_path, "Detection", "train", class_index, false, SequentialSampler(0, 6));
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);

// Check if VOCOp read the correct labels
// When we provide class_index, the labels of ["car", "cat", "train"] become [0, 1, 9]
std::shared_ptr<Tensor> expect_label;
Tensor::CreateFromMemory(TensorShape({1, 1}), DataType(DataType::DE_UINT32), nullptr, &expect_label);

uint32_t expect[] = {9, 9, 9, 1, 1, 0};
uint64_t i = 0;
while (row.size() != 0) {
auto image = row["image"];
auto label = row["label"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
MS_LOG(INFO) << "Tensor label shape: " << label->shape();
expect_label->SetItemAt({0, 0}, expect[i]);
EXPECT_EQ(*label, *expect_label);

iter->GetNextRow(&row);
i++;
}

EXPECT_EQ(i, 6);

// Manually terminate the pipeline
iter->Stop();
}

@@ -13,58 +13,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <fstream>
#include <iostream>
#include <memory>
#include <vector>
#include <string>

#include "utils/log_adapter.h"
#include "utils/ms_utils.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "securec.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/status.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/core/constants.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/include/samplers.h"

using namespace mindspore::dataset::api;
using mindspore::MsLogLevel::ERROR;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorShape;
using mindspore::dataset::TensorImpl;
using mindspore::dataset::DataType;
using mindspore::dataset::Status;
using mindspore::dataset::BorderType;
using mindspore::dataset::dsize_t;

class MindDataTestPipeline : public UT::DatasetOpTesting {
protected:
};

TEST_F(MindDataTestPipeline, TestMnistFail1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMnistFail1.";

// Create a Mnist Dataset
std::shared_ptr<Dataset> ds = Mnist("", RandomSampler(false, 10));
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestImageFolderFail1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFail1.";

// Create an ImageFolder Dataset
std::shared_ptr<Dataset> ds = ImageFolder("", true, nullptr);
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestCelebADataset) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCelebADataset.";

@@ -158,3 +117,19 @@ TEST_F(MindDataTestPipeline, TestCelebAException) {
std::shared_ptr<Dataset> ds1 = CelebA(folder_path, invalid_dataset_type);
EXPECT_EQ(ds1, nullptr);
}

TEST_F(MindDataTestPipeline, TestImageFolderFail1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFail1.";

// Create an ImageFolder Dataset
std::shared_ptr<Dataset> ds = ImageFolder("", true, nullptr);
EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestMnistFail1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMnistFail1.";

// Create a Mnist Dataset
std::shared_ptr<Dataset> ds = Mnist("", RandomSampler(false, 10));
EXPECT_EQ(ds, nullptr);
}

@@ -13,33 +13,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <fstream>
#include <iostream>
#include <memory>
#include <vector>
#include <string>

#include "utils/log_adapter.h"
#include "utils/ms_utils.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "securec.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/status.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/core/constants.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/include/samplers.h"

using namespace mindspore::dataset::api;
using mindspore::MsLogLevel::ERROR;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::Status;
using mindspore::dataset::BorderType;

class MindDataTestPipeline : public UT::DatasetOpTesting {
protected:

@@ -13,64 +13,43 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <fstream>
#include <iostream>
#include <memory>
#include <vector>
#include <string>

#include "utils/log_adapter.h"
#include "utils/ms_utils.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "securec.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/status.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/core/constants.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/include/samplers.h"

using namespace mindspore::dataset::api;
using mindspore::LogStream;
using mindspore::dataset::BorderType;
using mindspore::dataset::Status;
using mindspore::dataset::Tensor;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::MsLogLevel::ERROR;

class MindDataTestPipeline : public UT::DatasetOpTesting {
protected:
};

TEST_F(MindDataTestPipeline, TestUniformAugWithOps) {
// Create a Mnist Dataset
std::string folder_path = datasets_root_path_ + "/testMnistData/";
std::shared_ptr<Dataset> ds = Mnist(folder_path, RandomSampler(false, 20));
TEST_F(MindDataTestPipeline, TestCutOut) {
// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
int32_t repeat_num = 1;
int32_t repeat_num = 2;
ds = ds->Repeat(repeat_num);
EXPECT_NE(ds, nullptr);

// Create objects for the tensor ops
std::shared_ptr<TensorOperation> resize_op = vision::Resize({30, 30});
EXPECT_NE(resize_op, nullptr);
std::shared_ptr<TensorOperation> cut_out1 = vision::CutOut(30, 5);
EXPECT_NE(cut_out1, nullptr);

std::shared_ptr<TensorOperation> random_crop_op = vision::RandomCrop({28, 28});
EXPECT_NE(random_crop_op, nullptr);

std::shared_ptr<TensorOperation> center_crop_op = vision::CenterCrop({16, 16});
EXPECT_NE(center_crop_op, nullptr);

std::shared_ptr<TensorOperation> uniform_aug_op = vision::UniformAugment({random_crop_op, center_crop_op}, 2);
EXPECT_NE(uniform_aug_op, nullptr);
std::shared_ptr<TensorOperation> cut_out2 = vision::CutOut(30);
EXPECT_NE(cut_out2, nullptr);

// Create a Map operation on ds
ds = ds->Map({resize_op, uniform_aug_op});
ds = ds->Map({cut_out1, cut_out2});
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
@@ -84,10 +63,10 @@ TEST_F(MindDataTestPipeline, TestUniformAugWithOps) {

uint64_t i = 0;
while (row.size() != 0) {
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 20);

@@ -96,10 +75,10 @@ TEST_F(MindDataTestPipeline, TestUniformAugWithOps) {
iter->Stop();
}
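
// Editor's note (hedged): CutOut(30, 5) presumably erases five 30x30 patches per image,
// while CutOut(30) relies on the default patch count of one; the test only checks that
// both constructions succeed and that all 20 rows flow through the Map.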

TEST_F(MindDataTestPipeline, TestRandomFlip) {
TEST_F(MindDataTestPipeline, TestDecode) {
// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, false, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
@@ -108,14 +87,11 @@ TEST_F(MindDataTestPipeline, TestRandomFlip) {
EXPECT_NE(ds, nullptr);

// Create objects for the tensor ops
std::shared_ptr<TensorOperation> random_vertical_flip_op = vision::RandomVerticalFlip(0.5);
EXPECT_NE(random_vertical_flip_op, nullptr);

std::shared_ptr<TensorOperation> random_horizontal_flip_op = vision::RandomHorizontalFlip(0.5);
EXPECT_NE(random_horizontal_flip_op, nullptr);
std::shared_ptr<TensorOperation> decode = vision::Decode(true);
EXPECT_NE(decode, nullptr);

// Create a Map operation on ds
ds = ds->Map({random_vertical_flip_op, random_horizontal_flip_op});
ds = ds->Map({decode});
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
@@ -139,13 +115,84 @@ TEST_F(MindDataTestPipeline, TestRandomFlip) {
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
iter->GetNextRow(&row);
}

EXPECT_EQ(i, 20);

// Manually terminate the pipeline
iter->Stop();
}

TEST_F(MindDataTestPipeline, TestHwcToChw) {
// Create an ImageFolder Dataset
std::string folder_path = datasets_root_path_ + "/testPK/data/";
std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
EXPECT_NE(ds, nullptr);

// Create a Repeat operation on ds
int32_t repeat_num = 2;
ds = ds->Repeat(repeat_num);
EXPECT_NE(ds, nullptr);

// Create objects for the tensor ops
std::shared_ptr<TensorOperation> channel_swap = vision::HWC2CHW();
EXPECT_NE(channel_swap, nullptr);

// Create a Map operation on ds
ds = ds->Map({channel_swap});
EXPECT_NE(ds, nullptr);

// Create a Batch operation on ds
int32_t batch_size = 1;
ds = ds->Batch(batch_size);
EXPECT_NE(ds, nullptr);

// Create an iterator over the result of the above dataset
// This will trigger the creation of the Execution Tree and launch it.
std::shared_ptr<Iterator> iter = ds->CreateIterator();
EXPECT_NE(iter, nullptr);

// Iterate the dataset and get each row
std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
iter->GetNextRow(&row);

uint64_t i = 0;
while (row.size() != 0) {
i++;
auto image = row["image"];
MS_LOG(INFO) << "Tensor image shape: " << image->shape();
// check if the image is in NCHW
EXPECT_EQ(batch_size == image->shape()[0] && 3 == image->shape()[1]
&& 2268 == image->shape()[2] && 4032 == image->shape()[3], true);
iter->GetNextRow(&row);
}
EXPECT_EQ(i, 20);

// Manually terminate the pipeline
iter->Stop();
}
|
||||
|
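// Annotation, not part of this change: the NCHW assertion above hard-codes the
// 4032x2268 source images under testPK. A size-agnostic helper (hypothetical,
// sketch only) could check just the batch and channel positions that HWC2CHW
// followed by Batch are expected to produce:
inline void ExpectNchwLayout(const std::shared_ptr<Tensor> &image, int32_t batch, int32_t channels) {
  // After HWC2CHW and Batch, dim 0 is N and dim 1 is C.
  EXPECT_EQ(image->shape()[0], batch);
  EXPECT_EQ(image->shape()[1], channels);
}
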
TEST_F(MindDataTestPipeline, TestMixUpBatchFail1) {
  // Create a Cifar10 Dataset
  std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
  std::shared_ptr<Dataset> ds = Cifar10(folder_path, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 5;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> one_hot_op = vision::OneHot(10);
  EXPECT_NE(one_hot_op, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({one_hot_op}, {"label"});
  EXPECT_NE(ds, nullptr);

  std::shared_ptr<TensorOperation> mixup_batch_op = vision::MixUpBatch(-1);
  EXPECT_EQ(mixup_batch_op, nullptr);
}

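// Annotation, not part of this change: MixUpBatch rejects a non-positive alpha,
// which is why the factory returns nullptr above. Assuming any alpha > 0
// constructs, a sketch of a valid explicit value (2.0 is an arbitrary
// illustrative choice, hypothetical test name):
TEST_F(MindDataTestPipeline, TestMixUpBatchAlphaSketch) {
  std::shared_ptr<TensorOperation> mixup_batch_op = vision::MixUpBatch(2.0);
  EXPECT_NE(mixup_batch_op, nullptr);
}
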
TEST_F(MindDataTestPipeline, TestMixUpBatchSuccess1) {
  // Create a Cifar10 Dataset
  std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
@ -244,132 +291,6 @@ TEST_F(MindDataTestPipeline, TestMixUpBatchSuccess2) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestMixUpBatchFail1) {
  // Create a Cifar10 Dataset
  std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
  std::shared_ptr<Dataset> ds = Cifar10(folder_path, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 5;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> one_hot_op = vision::OneHot(10);
  EXPECT_NE(one_hot_op, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({one_hot_op}, {"label"});
  EXPECT_NE(ds, nullptr);

  std::shared_ptr<TensorOperation> mixup_batch_op = vision::MixUpBatch(-1);
  EXPECT_EQ(mixup_batch_op, nullptr);
}

TEST_F(MindDataTestPipeline, TestPad) {
  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> pad_op1 = vision::Pad({1, 2, 3, 4}, {0}, BorderType::kSymmetric);
  EXPECT_NE(pad_op1, nullptr);

  std::shared_ptr<TensorOperation> pad_op2 = vision::Pad({1}, {1, 1, 1}, BorderType::kEdge);
  EXPECT_NE(pad_op2, nullptr);

  std::shared_ptr<TensorOperation> pad_op3 = vision::Pad({1, 4});
  EXPECT_NE(pad_op3, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({pad_op1, pad_op2, pad_op3});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCutOut) {
  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> cut_out1 = vision::CutOut(30, 5);
  EXPECT_NE(cut_out1, nullptr);

  std::shared_ptr<TensorOperation> cut_out2 = vision::CutOut(30);
  EXPECT_NE(cut_out2, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({cut_out1, cut_out2});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestNormalize) {
  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
@ -417,53 +338,7 @@ TEST_F(MindDataTestPipeline, TestNormalize) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestDecode) {
  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, false, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> decode = vision::Decode(true);
  EXPECT_NE(decode, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({decode});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }
  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestHwcToChw) {
TEST_F(MindDataTestPipeline, TestPad) {
  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
@ -475,11 +350,81 @@ TEST_F(MindDataTestPipeline, TestHwcToChw) {
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> channel_swap = vision::HWC2CHW();
  EXPECT_NE(channel_swap, nullptr);
  std::shared_ptr<TensorOperation> pad_op1 = vision::Pad({1, 2, 3, 4}, {0}, BorderType::kSymmetric);
  EXPECT_NE(pad_op1, nullptr);

  std::shared_ptr<TensorOperation> pad_op2 = vision::Pad({1}, {1, 1, 1}, BorderType::kEdge);
  EXPECT_NE(pad_op2, nullptr);

  std::shared_ptr<TensorOperation> pad_op3 = vision::Pad({1, 4});
  EXPECT_NE(pad_op3, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({channel_swap});
  ds = ds->Map({pad_op1, pad_op2, pad_op3});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

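// Annotation, not part of this change: the three Pad constructions above suggest
// the padding vector accepts 1, 2, or 4 entries and the fill vector 1 (grayscale)
// or 3 (RGB) entries; the assumption here, following the usual {left, top, right,
// bottom} convention, is that shorter padding forms are broadcast to all sides.
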
TEST_F(MindDataTestPipeline, TestRandomAffineFail) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRandomAffineFail with invalid params.";

  // Create objects for the tensor ops
  // Invalid: empty vector of values for translate
  std::shared_ptr<TensorOperation> affine = vision::RandomAffine({0.0, 0.0}, {});
  EXPECT_EQ(affine, nullptr);
  // Invalid number of values for translate
  affine = vision::RandomAffine({0.0, 0.0}, {1, 1, 1, 1});
  EXPECT_EQ(affine, nullptr);
  // Invalid number of values for shear
  affine = vision::RandomAffine({30.0, 30.0}, {0.0, 0.0}, {2.0, 2.0}, {10.0, 10.0});
  EXPECT_EQ(affine, nullptr);
}

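// Annotation, not part of this change: read together with the failures above,
// the factory appears to expect two values each for degrees, translate and
// scale, and four for shear. A fully specified valid call, mirroring
// TestRandomAffineSuccess1 below, is:
//   vision::RandomAffine({30.0, 30.0}, {0.0, 0.0}, {2.0, 2.0}, {10.0, 10.0, 20.0, 20.0});
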
TEST_F(MindDataTestPipeline, TestRandomAffineSuccess1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRandomAffineSuccess1 with non-default params.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> affine =
    vision::RandomAffine({30.0, 30.0}, {0.0, 0.0}, {2.0, 2.0}, {10.0, 10.0, 20.0, 20.0});
  EXPECT_NE(affine, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({affine});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
@ -501,11 +446,58 @@ TEST_F(MindDataTestPipeline, TestHwcToChw) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    // check if the image is in NCHW
    EXPECT_EQ(batch_size == image->shape()[0] && 3 == image->shape()[1]
                && 2268 == image->shape()[2] && 4032 == image->shape()[3], true);
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestRandomAffineSuccess2) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRandomAffineSuccess2 with default params.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> affine = vision::RandomAffine({0.0, 0.0});
  EXPECT_NE(affine, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({affine});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
@ -628,9 +620,7 @@ TEST_F(MindDataTestPipeline, TestRandomSharpness) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestRandomAffineSuccess1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRandomAffineSuccess1 with non-default params.";

TEST_F(MindDataTestPipeline, TestRandomFlip) {
  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
@ -642,12 +632,14 @@ TEST_F(MindDataTestPipeline, TestRandomAffineSuccess1) {
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> affine =
    vision::RandomAffine({30.0, 30.0}, {0.0, 0.0}, {2.0, 2.0}, {10.0, 10.0, 20.0, 20.0});
  EXPECT_NE(affine, nullptr);
  std::shared_ptr<TensorOperation> random_vertical_flip_op = vision::RandomVerticalFlip(0.5);
  EXPECT_NE(random_vertical_flip_op, nullptr);

  std::shared_ptr<TensorOperation> random_horizontal_flip_op = vision::RandomHorizontalFlip(0.5);
  EXPECT_NE(random_horizontal_flip_op, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({affine});
  ds = ds->Map({random_vertical_flip_op, random_horizontal_flip_op});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
@ -678,69 +670,6 @@ TEST_F(MindDataTestPipeline, TestRandomAffineSuccess1) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestRandomAffineSuccess2) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRandomAffineSuccess2 with default params.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> affine = vision::RandomAffine({0.0, 0.0});
  EXPECT_NE(affine, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({affine});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestRandomAffineFail) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRandomAffineFail with invalid params.";

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> affine = vision::RandomAffine({0.0, 0.0}, {});
  EXPECT_EQ(affine, nullptr);
  // Invalid number of values for translate
  affine = vision::RandomAffine({0.0, 0.0}, {1, 1, 1, 1});
  EXPECT_EQ(affine, nullptr);
  // Invalid number of values for shear
  affine = vision::RandomAffine({30.0, 30.0}, {0.0, 0.0}, {2.0, 2.0}, {10.0, 10.0});
  EXPECT_EQ(affine, nullptr);
}

TEST_F(MindDataTestPipeline, TestRandomRotation) {
  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
@ -788,6 +717,57 @@ TEST_F(MindDataTestPipeline, TestRandomRotation) {
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestUniformAugWithOps) {
  // Create a Mnist Dataset
  std::string folder_path = datasets_root_path_ + "/testMnistData/";
  std::shared_ptr<Dataset> ds = Mnist(folder_path, RandomSampler(false, 20));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 1;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> resize_op = vision::Resize({30, 30});
  EXPECT_NE(resize_op, nullptr);

  std::shared_ptr<TensorOperation> random_crop_op = vision::RandomCrop({28, 28});
  EXPECT_NE(random_crop_op, nullptr);

  std::shared_ptr<TensorOperation> center_crop_op = vision::CenterCrop({16, 16});
  EXPECT_NE(center_crop_op, nullptr);

  std::shared_ptr<TensorOperation> uniform_aug_op = vision::UniformAugment({random_crop_op, center_crop_op}, 2);
  EXPECT_NE(uniform_aug_op, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({resize_op, uniform_aug_op});
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

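// Annotation, not part of this change: UniformAugment takes a pool of ops plus a
// count, presumably applying that many randomly chosen ops from the pool to each
// sample (here 2 of {RandomCrop 28x28, CenterCrop 16x16}). Resize stays outside
// the pool so every image is first brought to 30x30 deterministically.
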
TEST_F(MindDataTestPipeline, TestRandomSolarize) {
  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";