From 1775c4e83d527efe1ff86be892e8a61456b92462 Mon Sep 17 00:00:00 2001 From: Cathy Wong Date: Fri, 12 Mar 2021 15:09:27 -0500 Subject: [PATCH] dataset: Reinstate INFO logging and data verification - part 3 Updates: GetItemAt, CreateFromVector, CreateScalar. Add TEST_MS_LOG_MSTENSOR. Wrap ASSERT_OK around Tensor function calls. --- .../ut/cpp/dataset/c_api_dataset_clue_test.cc | 170 ++-- .../ut/cpp/dataset/c_api_dataset_coco_test.cc | 6 +- .../cpp/dataset/c_api_dataset_config_test.cc | 53 +- .../ut/cpp/dataset/c_api_dataset_csv_test.cc | 244 ++--- .../dataset/c_api_dataset_manifest_test.cc | 15 +- .../dataset/c_api_dataset_minddata_test.cc | 35 +- .../ut/cpp/dataset/c_api_dataset_ops_test.cc | 36 +- tests/ut/cpp/dataset/c_api_dataset_save.cc | 14 +- .../dataset/c_api_dataset_textfile_test.cc | 150 +-- .../dataset/c_api_dataset_tfrecord_test.cc | 35 +- tests/ut/cpp/dataset/c_api_datasets_test.cc | 2 +- tests/ut/cpp/dataset/c_api_epoch_ctrl_test.cc | 52 +- .../c_api_text_sentence_piece_vocab_test.cc | 36 +- tests/ut/cpp/dataset/c_api_text_test.cc | 950 +++++++++++------- tests/ut/cpp/dataset/c_api_text_vocab_test.cc | 108 +- tests/ut/cpp/dataset/c_api_transforms_test.cc | 10 +- .../cpp/dataset/c_api_vision_r_to_z_test.cc | 2 +- tests/ut/cpp/dataset/common/common.cc | 4 +- tests/ut/cpp/dataset/common/common.h | 8 + 19 files changed, 1136 insertions(+), 794 deletions(-) diff --git a/tests/ut/cpp/dataset/c_api_dataset_clue_test.cc b/tests/ut/cpp/dataset/c_api_dataset_clue_test.cc index 30dda9633d5..1186f6ff850 100644 --- a/tests/ut/cpp/dataset/c_api_dataset_clue_test.cc +++ b/tests/ut/cpp/dataset/c_api_dataset_clue_test.cc @@ -48,16 +48,18 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetAFQMC) { iter->GetNextRow(&row); EXPECT_NE(row.find("sentence1"), row.end()); - // std::vector expected_result = {"蚂蚁借呗等额还款能否换成先息后本", "蚂蚁花呗说我违约了", - // "帮我看看本月花呗账单结清了没"}; + std::vector expected_result = {"蚂蚁借呗等额还款能否换成先息后本", "蚂蚁花呗说我违约了", + "帮我看看本月花呗账单结清了没"}; uint64_t i = 0; while (row.size() != 0) { auto text = row["sentence1"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); iter->GetNextRow(&row); i++; @@ -71,7 +73,7 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetAFQMC) { // test usage = "test"; - // expected_result = {"借呗取消的时间", "网商贷用什么方法转变成借呗", "我的借呗为什么开通不了"}; + expected_result = {"借呗取消的时间", "网商贷用什么方法转变成借呗", "我的借呗为什么开通不了"}; ds = CLUE({test_file}, task, usage, 0, ShuffleMode::kFalse); EXPECT_NE(ds, nullptr); iter = ds->CreateIterator(); @@ -80,11 +82,13 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetAFQMC) { EXPECT_NE(row.find("sentence1"), row.end()); i = 0; while (row.size() != 0) { - // auto text = row["sentence1"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + auto text = row["sentence1"]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); iter->GetNextRow(&row); i++; } @@ -92,7 +96,7 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetAFQMC) { // eval usage = "eval"; - // expected_result = 
{"你有花呗吗", "吃饭能用花呗吗", "蚂蚁花呗支付金额有什么限制"}; + expected_result = {"你有花呗吗", "吃饭能用花呗吗", "蚂蚁花呗支付金额有什么限制"}; ds = CLUE({eval_file}, task, usage, 0, ShuffleMode::kFalse); EXPECT_NE(ds, nullptr); iter = ds->CreateIterator(); @@ -101,11 +105,13 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetAFQMC) { EXPECT_NE(row.find("sentence1"), row.end()); i = 0; while (row.size() != 0) { - // auto text = row["sentence1"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + auto text = row["sentence1"]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); iter->GetNextRow(&row); i++; } @@ -238,15 +244,17 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetCMNLI) { iter->GetNextRow(&row); EXPECT_NE(row.find("sentence1"), row.end()); - // std::vector expected_result = {"你应该给这件衣服定一个价格。", "我怎么知道他要说什么", "向左。"}; + std::vector expected_result = {"你应该给这件衣服定一个价格。", "我怎么知道他要说什么", "向左。"}; uint64_t i = 0; while (row.size() != 0) { auto text = row["sentence1"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); iter->GetNextRow(&row); i++; @@ -279,15 +287,17 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetCSL) { iter->GetNextRow(&row); EXPECT_NE(row.find("abst"), row.end()); - // std::vector expected_result = {"这是一段长文本", "这是一段长文本", "这是一段长文本"}; + std::vector expected_result = {"这是一段长文本", "这是一段长文本", "这是一段长文本"}; uint64_t i = 0; while (row.size() != 0) { auto text = row["abst"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); iter->GetNextRow(&row); i++; @@ -420,15 +430,17 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetIFLYTEK) { iter->GetNextRow(&row); EXPECT_NE(row.find("sentence"), row.end()); - // std::vector expected_result = {"第一个文本", "第二个文本", "第三个文本"}; + std::vector expected_result = {"第一个文本", "第二个文本", "第三个文本"}; uint64_t i = 0; while (row.size() != 0) { auto text = row["sentence"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); iter->GetNextRow(&row); i++; @@ -475,22 +487,24 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleFilesA) { iter->GetNextRow(&row); EXPECT_NE(row.find("sentence1"), row.end()); - // std::vector expected_result = {"你有花呗吗", - // "吃饭能用花呗吗", - // "蚂蚁花呗支付金额有什么限制", - // "蚂蚁借呗等额还款能否换成先息后本", - // "蚂蚁花呗说我违约了", - // "帮我看看本月花呗账单结清了没"}; + std::vector expected_result = {"你有花呗吗", + "吃饭能用花呗吗", 
+ "蚂蚁花呗支付金额有什么限制", + "蚂蚁借呗等额还款能否换成先息后本", + "蚂蚁花呗说我违约了", + "帮我看看本月花呗账单结清了没"}; uint64_t i = 0; while (row.size() != 0) { - // auto text = row["sentence1"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + auto text = row["sentence1"]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); i++; iter->GetNextRow(&row); } @@ -540,21 +554,23 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleFilesB) { iter->GetNextRow(&row); EXPECT_NE(row.find("sentence1"), row.end()); - // std::vector expected_result = {"你有花呗吗", - // "吃饭能用花呗吗", - // "蚂蚁花呗支付金额有什么限制", - // "蚂蚁借呗等额还款能否换成先息后本", - // "蚂蚁花呗说我违约了", - // "帮我看看本月花呗账单结清了没"}; + std::vector expected_result = {"你有花呗吗", + "吃饭能用花呗吗", + "蚂蚁花呗支付金额有什么限制", + "蚂蚁借呗等额还款能否换成先息后本", + "蚂蚁花呗说我违约了", + "帮我看看本月花呗账单结清了没"}; uint64_t i = 0; while (row.size() != 0) { - // auto text = row["sentence1"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); + auto text = row["sentence1"]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); i++; iter->GetNextRow(&row); } @@ -598,15 +614,17 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleGlobal) { iter->GetNextRow(&row); EXPECT_NE(row.find("sentence1"), row.end()); - // std::vector expected_result = {"蚂蚁花呗说我违约了", "帮我看看本月花呗账单结清了没", - // "蚂蚁借呗等额还款能否换成先息后本"}; + std::vector expected_result = {"蚂蚁花呗说我违约了", "帮我看看本月花呗账单结清了没", + "蚂蚁借呗等额还款能否换成先息后本"}; uint64_t i = 0; while (row.size() != 0) { auto text = row["sentence1"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); i++; iter->GetNextRow(&row); @@ -643,15 +661,17 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetTNEWS) { iter->GetNextRow(&row); EXPECT_NE(row.find("sentence"), row.end()); - // std::vector expected_result = {"新闻1", "新闻2", "新闻3"}; + std::vector expected_result = {"新闻1", "新闻2", "新闻3"}; uint64_t i = 0; while (row.size() != 0) { auto text = row["sentence"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); iter->GetNextRow(&row); i++; @@ -684,16 +704,18 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetWSC) { iter->GetNextRow(&row); EXPECT_NE(row.find("text"), row.end()); - // 
std::vector expected_result = {"小明呢,他在哪?", "小红刚刚看到小明,他在操场", - // "等小明回来,小张你叫他交作业"}; + std::vector expected_result = {"小明呢,他在哪?", "小红刚刚看到小明,他在操场", + "等小明回来,小张你叫他交作业"}; uint64_t i = 0; while (row.size() != 0) { auto text = row["text"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); iter->GetNextRow(&row); i++; diff --git a/tests/ut/cpp/dataset/c_api_dataset_coco_test.cc b/tests/ut/cpp/dataset/c_api_dataset_coco_test.cc index b36bdce12b9..d07992c9ef6 100644 --- a/tests/ut/cpp/dataset/c_api_dataset_coco_test.cc +++ b/tests/ut/cpp/dataset/c_api_dataset_coco_test.cc @@ -329,8 +329,7 @@ TEST_F(MindDataTestPipeline, TestCocoPanoptic) { EXPECT_MSTENSOR_EQ(bbox, expect_bbox); std::shared_ptr de_expect_categoryid; - ASSERT_OK( - Tensor::CreateFromVector(expect_categoryid_vector[i], TensorShape({bbox_size, 1}), &de_expect_categoryid)); + ASSERT_OK(Tensor::CreateFromVector(expect_categoryid_vector[i], TensorShape({bbox_size, 1}), &de_expect_categoryid)); mindspore::MSTensor expect_categoryid = mindspore::MSTensor(std::make_shared(de_expect_categoryid)); EXPECT_MSTENSOR_EQ(category_id, expect_categoryid); @@ -420,8 +419,7 @@ TEST_F(MindDataTestPipeline, TestCocoStuff) { EXPECT_MSTENSOR_EQ(image, expect_image); std::shared_ptr de_expect_segmentation; - ASSERT_OK( - Tensor::CreateFromVector(expect_segmentation_vector[i], TensorShape(expect_size[i]), &de_expect_segmentation)); + ASSERT_OK(Tensor::CreateFromVector(expect_segmentation_vector[i], TensorShape(expect_size[i]), &de_expect_segmentation)); mindspore::MSTensor expect_segmentation = mindspore::MSTensor(std::make_shared(de_expect_segmentation)); EXPECT_MSTENSOR_EQ(segmentation, expect_segmentation); diff --git a/tests/ut/cpp/dataset/c_api_dataset_config_test.cc b/tests/ut/cpp/dataset/c_api_dataset_config_test.cc index 80bd63400c9..a71a9547e25 100644 --- a/tests/ut/cpp/dataset/c_api_dataset_config_test.cc +++ b/tests/ut/cpp/dataset/c_api_dataset_config_test.cc @@ -132,18 +132,21 @@ TEST_F(MindDataTestPipeline, TestShuffleWithSeed) { iter->GetNextRow(&row); EXPECT_NE(row.find("text"), row.end()); - // std::vector expected_result = {"Good luck to everyone.", "Be happy every day.", "This is a text - // file."}; + std::vector expected_result = {"Good luck to everyone.", "Be happy every day.", "This is a text file."}; uint64_t i = 0; while (row.size() != 0) { - // auto text = row["text"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + auto text = row["text"]; + + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } @@ -194,22 +197,26 @@ TEST_F(MindDataTestPipeline, TestCallShuffleTwice) { iter->GetNextRow(&row); EXPECT_NE(row.find("text"), row.end()); - // std::vector first_copy; - // std::vector 
second_copy; + std::vector first_copy; + std::vector second_copy; uint64_t i = 0; while (row.size() != 0) { - // auto text = row["text"]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + auto text = row["text"]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // The first three samples are the first copy and the rest are the second - // if (i < 3) { - // first_copy.push_back(ss); - // } else { - // second_copy.push_back(ss); - // } + if (i < 3) { + first_copy.push_back(ss); + } else { + second_copy.push_back(ss); + } + i++; iter->GetNextRow(&row); } @@ -218,9 +225,9 @@ TEST_F(MindDataTestPipeline, TestCallShuffleTwice) { EXPECT_EQ(i, 6); // Compare the two copies which are deterministic difference - // for (int j = 0; j < 3; j++) { - // EXPECT_STRNE(first_copy.at(j).c_str(), second_copy.at(j).c_str()); - // } + for (int j = 0; j < 3; j++) { + EXPECT_STRNE(first_copy.at(j).c_str(), second_copy.at(j).c_str()); + } // Manually terminate the pipeline iter->Stop(); diff --git a/tests/ut/cpp/dataset/c_api_dataset_csv_test.cc b/tests/ut/cpp/dataset/c_api_dataset_csv_test.cc index 12b5cf09a88..9b074ab9ee7 100644 --- a/tests/ut/cpp/dataset/c_api_dataset_csv_test.cc +++ b/tests/ut/cpp/dataset/c_api_dataset_csv_test.cc @@ -44,21 +44,23 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetBasic) { std::unordered_map row; iter->GetNextRow(&row); EXPECT_NE(row.find("col1"), row.end()); - // std::vector> expected_result = { - // {"1", "2", "3", "4"}, - // {"5", "6", "7", "8"}, - // {"9", "10", "11", "12"}, - // }; + std::vector> expected_result = { + {"1", "2", "3", "4"}, + {"5", "6", "7", "8"}, + {"9", "10", "11", "12"}, + }; uint64_t i = 0; while (row.size() != 0) { - // for (int j = 0; j < column_names.size(); j++) { - // auto text = row[column_names[j]]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); - // } + for (int j = 0; j < column_names.size(); j++) { + auto text = row[column_names[j]]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); + } iter->GetNextRow(&row); i++; } @@ -109,20 +111,22 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetMultiFiles) { std::unordered_map row; iter->GetNextRow(&row); EXPECT_NE(row.find("col1"), row.end()); - // std::vector> expected_result = { - // {"17", "18", "19", "20"}, {"1", "2", "3", "4"}, {"5", "6", "7", "8"}, - // {"13", "14", "15", "16"}, {"21", "22", "23", "24"}, {"9", "10", "11", "12"}, - // }; + std::vector> expected_result = { + {"17", "18", "19", "20"}, {"1", "2", "3", "4"}, {"5", "6", "7", "8"}, + {"13", "14", "15", "16"}, {"21", "22", "23", "24"}, {"9", "10", "11", "12"}, + }; uint64_t i = 0; while (row.size() != 0) { - // for (int j = 0; j < column_names.size(); j++) { - // auto text = row[column_names[j]]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); - // } + for (int j = 0; j < column_names.size(); j++) { + auto text = row[column_names[j]]; + std::shared_ptr de_text; + 
ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); + } iter->GetNextRow(&row); i++; } @@ -156,17 +160,19 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetNumSamples) { std::unordered_map row; iter->GetNextRow(&row); EXPECT_NE(row.find("col1"), row.end()); - // std::vector> expected_result = {{"1", "2", "3", "4"}, {"5", "6", "7", "8"}}; + std::vector> expected_result = {{"1", "2", "3", "4"}, {"5", "6", "7", "8"}}; uint64_t i = 0; while (row.size() != 0) { - // for (int j = 0; j < column_names.size(); j++) { - // auto text = row[column_names[j]]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); - // } + for (int j = 0; j < column_names.size(); j++) { + auto text = row[column_names[j]]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); + } iter->GetNextRow(&row); i++; } @@ -196,17 +202,19 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetDistribution) { std::unordered_map row; iter->GetNextRow(&row); EXPECT_NE(row.find("col1"), row.end()); - // std::vector> expected_result = {{"1", "2", "3", "4"}, {"5", "6", "7", "8"}}; + std::vector> expected_result = {{"1", "2", "3", "4"}, {"5", "6", "7", "8"}}; uint64_t i = 0; while (row.size() != 0) { - // for (int j = 0; j < column_names.size(); j++) { - // auto text = row[column_names[j]]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); - // } + for (int j = 0; j < column_names.size(); j++) { + auto text = row[column_names[j]]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); + } iter->GetNextRow(&row); i++; } @@ -241,41 +249,43 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetType) { // Iterate the dataset and get each row std::unordered_map row; iter->GetNextRow(&row); - // std::vector>> expected = { - // { - // std::make_shared>(CsvType::STRING, ""), - // std::make_shared>(CsvType::INT, 2), - // std::make_shared>(CsvType::FLOAT, 3.0), - // std::make_shared>(CsvType::STRING, ""), - // }, - // { - // std::make_shared>(CsvType::STRING, "a"), - // std::make_shared>(CsvType::INT, 4), - // std::make_shared>(CsvType::FLOAT, 5.0), - // std::make_shared>(CsvType::STRING, "b"), - // }, - // }; + std::vector>> expected = { + { + std::make_shared>(CsvType::STRING, ""), + std::make_shared>(CsvType::INT, 2), + std::make_shared>(CsvType::FLOAT, 3.0), + std::make_shared>(CsvType::STRING, ""), + }, + { + std::make_shared>(CsvType::STRING, "a"), + std::make_shared>(CsvType::INT, 4), + std::make_shared>(CsvType::FLOAT, 5.0), + std::make_shared>(CsvType::STRING, "b"), + }, + }; EXPECT_NE(row.find("col1"), row.end()); uint64_t i = 0; while (row.size() != 0) { - // for (int j = 0; j < column_names.size(); j++) { - // auto text = row[column_names[j]]; - // if (colum_type[j]->type == CsvType::INT) { - // int val; - // text->GetItemAt(&val, {0}); - // EXPECT_EQ(val, std::dynamic_pointer_cast>(expected[i][j])->value); - // } else if (colum_type[j]->type == CsvType::FLOAT) { - // float val; - // text->GetItemAt(&val, {0}); - 
// EXPECT_EQ(val, std::dynamic_pointer_cast>(expected[i][j])->value); - // } else if (colum_type[j]->type == CsvType::STRING) { - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), std::dynamic_pointer_cast>(expected[i][j])->value.c_str()); - // } - // } + for (int j = 0; j < column_names.size(); j++) { + auto text = row[column_names[j]]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + if (colum_type[j]->type == CsvType::INT) { + int val; + de_text->GetItemAt(&val, {0}); + EXPECT_EQ(val, std::dynamic_pointer_cast>(expected[i][j])->value); + } else if (colum_type[j]->type == CsvType::FLOAT) { + float val; + de_text->GetItemAt(&val, {0}); + EXPECT_EQ(val, std::dynamic_pointer_cast>(expected[i][j])->value); + } else if (colum_type[j]->type == CsvType::STRING) { + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), std::dynamic_pointer_cast>(expected[i][j])->value.c_str()); + } + } iter->GetNextRow(&row); i++; } @@ -304,20 +314,22 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetHeader) { std::unordered_map row; iter->GetNextRow(&row); EXPECT_NE(row.find("col1"), row.end()); - // std::vector> expected_result = { - // {"a", "b", "c", "d"}, - // }; + std::vector> expected_result = { + {"a", "b", "c", "d"}, + }; uint64_t i = 0; - // std::vector column_names = {"col1", "col2", "col3", "col4"}; + std::vector column_names = {"col1", "col2", "col3", "col4"}; while (row.size() != 0) { - // for (int j = 0; j < column_names.size(); j++) { - // auto text = row[column_names[j]]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); - // } + for (int j = 0; j < column_names.size(); j++) { + auto text = row[column_names[j]]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); + } iter->GetNextRow(&row); i++; } @@ -411,20 +423,22 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetShuffleFilesA) { std::unordered_map row; iter->GetNextRow(&row); EXPECT_NE(row.find("col1"), row.end()); - // std::vector> expected_result = { - // {"13", "14", "15", "16"}, {"1", "2", "3", "4"}, {"17", "18", "19", "20"}, - // {"5", "6", "7", "8"}, {"21", "22", "23", "24"}, {"9", "10", "11", "12"}, - // }; + std::vector> expected_result = { + {"13", "14", "15", "16"}, {"1", "2", "3", "4"}, {"17", "18", "19", "20"}, + {"5", "6", "7", "8"}, {"21", "22", "23", "24"}, {"9", "10", "11", "12"}, + }; uint64_t i = 0; while (row.size() != 0) { - // for (int j = 0; j < column_names.size(); j++) { - // auto text = row[column_names[j]]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); - // } + for (int j = 0; j < column_names.size(); j++) { + auto text = row[column_names[j]]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); + } iter->GetNextRow(&row); i++; } @@ -466,21 +480,23 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetShuffleFilesB) { std::unordered_map row; iter->GetNextRow(&row); EXPECT_NE(row.find("col1"), row.end()); - // std::vector> expected_result = { - // {"13", "14", "15", 
"16"}, {"1", "2", "3", "4"}, {"17", "18", "19", "20"}, - // {"5", "6", "7", "8"}, {"21", "22", "23", "24"}, {"9", "10", "11", "12"}, - // }; + std::vector> expected_result = { + {"13", "14", "15", "16"}, {"1", "2", "3", "4"}, {"17", "18", "19", "20"}, + {"5", "6", "7", "8"}, {"21", "22", "23", "24"}, {"9", "10", "11", "12"}, + }; uint64_t i = 0; while (row.size() != 0) { - // for (int j = 0; j < column_names.size(); j++) { - // auto text = row[column_names[j]]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); - // } + for (int j = 0; j < column_names.size(); j++) { + auto text = row[column_names[j]]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); + } iter->GetNextRow(&row); i++; } @@ -522,18 +538,20 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetShuffleGlobal) { std::unordered_map row; iter->GetNextRow(&row); EXPECT_NE(row.find("col1"), row.end()); - // std::vector> expected_result = { - // {"5", "6", "7", "8"}, {"9", "10", "11", "12"}, {"1", "2", "3", "4"}}; + std::vector> expected_result = { + {"5", "6", "7", "8"}, {"9", "10", "11", "12"}, {"1", "2", "3", "4"}}; uint64_t i = 0; while (row.size() != 0) { - // for (int j = 0; j < column_names.size(); j++) { - // auto text = row[column_names[j]]; - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); - // } + for (int j = 0; j < column_names.size(); j++) { + auto text = row[column_names[j]]; + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + EXPECT_STREQ(ss.c_str(), expected_result[i][j].c_str()); + } iter->GetNextRow(&row); i++; } diff --git a/tests/ut/cpp/dataset/c_api_dataset_manifest_test.cc b/tests/ut/cpp/dataset/c_api_dataset_manifest_test.cc index c621878d032..f5217578047 100644 --- a/tests/ut/cpp/dataset/c_api_dataset_manifest_test.cc +++ b/tests/ut/cpp/dataset/c_api_dataset_manifest_test.cc @@ -238,15 +238,20 @@ TEST_F(MindDataTestPipeline, TestManifestClassIndex) { iter->GetNextRow(&row); uint64_t i = 0; - // int32_t label_idx = 0; + int32_t label_idx = 0; while (row.size() != 0) { i++; auto image = row["image"]; + auto label = row["label"]; MS_LOG(INFO) << "Tensor image shape: " << image.Shape(); - // row["label"]->GetItemAt(&label_idx, {}); - // MS_LOG(INFO) << "Tensor label value: " << label_idx; - // auto label_it = std::find(expected_label.begin(), expected_label.end(), label_idx); - // EXPECT_NE(label_it, expected_label.end()); + + std::shared_ptr de_label; + ASSERT_OK(Tensor::CreateFromMSTensor(label, &de_label)); + de_label->GetItemAt(&label_idx, {}); + MS_LOG(INFO) << "Tensor label value: " << label_idx; + auto label_it = std::find(expected_label.begin(), expected_label.end(), label_idx); + EXPECT_NE(label_it, expected_label.end()); + iter->GetNextRow(&row); } diff --git a/tests/ut/cpp/dataset/c_api_dataset_minddata_test.cc b/tests/ut/cpp/dataset/c_api_dataset_minddata_test.cc index a63d73a1044..01383e39099 100644 --- a/tests/ut/cpp/dataset/c_api_dataset_minddata_test.cc +++ 
b/tests/ut/cpp/dataset/c_api_dataset_minddata_test.cc @@ -46,8 +46,8 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess1) { uint64_t i = 0; while (row.size() != 0) { i++; - // auto image = row["file_name"]; - // MS_LOG(INFO) << "Tensor image file name: " << *image; + auto image = row["file_name"]; + TEST_MS_LOG_MSTENSOR(INFO, "Tensor image file name: ", image); iter->GetNextRow(&row); } @@ -96,8 +96,9 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess2) { uint64_t i = 0; while (row.size() != 0) { i++; - // auto image = row["file_name"]; - // MS_LOG(INFO) << "Tensor image file name: " << *image; + auto image = row["file_name"]; + TEST_MS_LOG_MSTENSOR(INFO, "Tensor image file name: ", image); + iter->GetNextRow(&row); } @@ -131,8 +132,9 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess3) { uint64_t i = 0; while (row.size() != 0) { i++; - // auto image = row["file_name"]; - // MS_LOG(INFO) << "Tensor image file name: " << *image; + auto image = row["file_name"]; + TEST_MS_LOG_MSTENSOR(INFO, "Tensor image file name: ", image); + iter->GetNextRow(&row); } @@ -165,8 +167,8 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess4) { uint64_t i = 0; while (row.size() != 0) { i++; - // auto label = row["label"]; - // MS_LOG(INFO) << "Tensor label: " << *label; + auto label = row["label"]; + TEST_MS_LOG_MSTENSOR(INFO, "Tensor label: ", label); iter->GetNextRow(&row); } @@ -266,7 +268,8 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess6) { uint64_t j = 0; while (row.size() != 0) { j++; - // MS_LOG(INFO) << "Tensor label: " << *row["label"]; + auto label = row["label"]; + TEST_MS_LOG_MSTENSOR(INFO, "Tensor label: ", label); iter->GetNextRow(&row); } EXPECT_EQ(j, expected_samples[i]); @@ -312,10 +315,10 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess7) { uint64_t i = 0; while (row.size() != 0) { i++; - // auto image = row["file_name"]; + auto image = row["file_name"]; auto label = row["label"]; - // MS_LOG(INFO) << "Tensor file name: " << *image; - // MS_LOG(INFO) << "Tensor label: " << *label; + TEST_MS_LOG_MSTENSOR(INFO, "Tensor image file name: ", image); + TEST_MS_LOG_MSTENSOR(INFO, "Tensor label: ", label); EXPECT_MSTENSOR_EQ(label, expect_item); @@ -383,10 +386,10 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess8) { uint64_t i = 0; while (row.size() != 0) { i++; - // auto image = row["file_name"]; + auto image = row["file_name"]; auto label = row["label"]; - // MS_LOG(INFO) << "Tensor file name: " << *image; - // MS_LOG(INFO) << "Tensor label: " << *label; + TEST_MS_LOG_MSTENSOR(INFO, "Tensor image file name: ", image); + TEST_MS_LOG_MSTENSOR(INFO, "Tensor label: ", label); EXPECT_MSTENSOR_EQ(label, expect_item); @@ -459,7 +462,7 @@ TEST_F(MindDataTestPipeline, TestMindDataSuccess9) { while (row.size() != 0) { i++; auto label = row["label"]; - // MS_LOG(INFO) << "Tensor label: " << *label; + TEST_MS_LOG_MSTENSOR(INFO, "Tensor label: ", label); EXPECT_MSTENSOR_EQ(label, expect_item); diff --git a/tests/ut/cpp/dataset/c_api_dataset_ops_test.cc b/tests/ut/cpp/dataset/c_api_dataset_ops_test.cc index fa907163fcd..07ac14e2720 100644 --- a/tests/ut/cpp/dataset/c_api_dataset_ops_test.cc +++ b/tests/ut/cpp/dataset/c_api_dataset_ops_test.cc @@ -632,20 +632,24 @@ TEST_F(MindDataTestPipeline, TestFilterSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector label_list; + std::vector label_list; uint64_t i = 0; while (row.size() != 0) { i++; - // auto label = row["label"]; - // uint64_t label_value; - // label->GetItemAt(&label_value, {0}); - // label_list.push_back(label_value); 
+ auto label = row["label"]; + + std::shared_ptr de_label; + uint64_t label_value; + ASSERT_OK(Tensor::CreateFromMSTensor(label, &de_label)); + de_label->GetItemAt(&label_value, {0}); + label_list.push_back(label_value); + iter->GetNextRow(&row); } // Only 1 column whose label is equal to 3 EXPECT_EQ(i, 1); - // EXPECT_EQ(label_list.at(0), 3); + EXPECT_EQ(label_list.at(0), 3); // Manually terminate the pipeline iter->Stop(); @@ -674,21 +678,25 @@ TEST_F(MindDataTestPipeline, TestFilterSuccess2) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector label_list; + std::vector label_list; uint64_t i = 0; while (row.size() != 0) { i++; - // auto label = row["label"]; - // uint64_t label_value; - // label->GetItemAt(&label_value, {0}); - // label_list.push_back(label_value); + auto label = row["label"]; + + std::shared_ptr de_label; + uint64_t label_value; + ASSERT_OK(Tensor::CreateFromMSTensor(label, &de_label)); + de_label->GetItemAt(&label_value, {0}); + label_list.push_back(label_value); + iter->GetNextRow(&row); } // There are 2 columns whose label is more than 1 EXPECT_EQ(i, 2); - // EXPECT_EQ(label_list.at(0), 2); - // EXPECT_EQ(label_list.at(1), 3); + EXPECT_EQ(label_list.at(0), 2); + EXPECT_EQ(label_list.at(1), 3); // Manually terminate the pipeline iter->Stop(); @@ -1079,7 +1087,7 @@ TEST_F(MindDataTestPipeline, TestProjectMapAutoInjection) { i++; auto image = row["image"]; MS_LOG(INFO) << "Tensor image shape: " << image.Shape(); - // EXPECT_EQ(image.Shape()[0], 30); + EXPECT_EQ(image.Shape()[0], 30); iter->GetNextRow(&row); } diff --git a/tests/ut/cpp/dataset/c_api_dataset_save.cc b/tests/ut/cpp/dataset/c_api_dataset_save.cc index 13807a27f91..5bd7e409f9c 100644 --- a/tests/ut/cpp/dataset/c_api_dataset_save.cc +++ b/tests/ut/cpp/dataset/c_api_dataset_save.cc @@ -46,9 +46,10 @@ TEST_F(MindDataTestPipeline, TestSaveCifar10AndLoad) { // Save original data for comparison uint64_t i = 0; while (row.size() != 0) { - // auto label = row["label"]; - // original_data.push_back(label); - // MS_LOG(INFO) << "Tensor label: " << *label; + auto label = row["label"]; + original_data.push_back(label); + TEST_MS_LOG_MSTENSOR(INFO, "Tensor label: ", label); + iter->GetNextRow(&row); i++; } @@ -97,9 +98,10 @@ TEST_F(MindDataTestPipeline, TestSaveCifar10AndLoad) { // Expect the output data is same with original_data uint64_t j = 0; while (row_minddata.size() != 0) { - // auto label = row_minddata["label"]; - // EXPECT_EQ(*original_data[j], *label); - // MS_LOG(INFO) << "Tensor label: " << *label; + auto label = row_minddata["label"]; + EXPECT_MSTENSOR_EQ(original_data[j], label); + TEST_MS_LOG_MSTENSOR(INFO, "Tensor label: ", label); + iter_minddata->GetNextRow(&row_minddata); j++; } diff --git a/tests/ut/cpp/dataset/c_api_dataset_textfile_test.cc b/tests/ut/cpp/dataset/c_api_dataset_textfile_test.cc index 23ec963bd1f..cf0fe29034e 100644 --- a/tests/ut/cpp/dataset/c_api_dataset_textfile_test.cc +++ b/tests/ut/cpp/dataset/c_api_dataset_textfile_test.cc @@ -60,12 +60,15 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetBasic) { while (row.size() != 0) { auto text = row["text"]; MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + 
std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } @@ -317,12 +320,15 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse1A) { while (row.size() != 0) { auto text = row["text"]; MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } @@ -375,12 +381,15 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse1B) { while (row.size() != 0) { auto text = row["text"]; MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } @@ -432,12 +441,15 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse4Shard) { while (row.size() != 0) { auto text = row["text"]; MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } @@ -492,12 +504,15 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles1A) { while (row.size() != 0) { auto text = row["text"]; MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + 
EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } @@ -552,12 +567,15 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles1B) { while (row.size() != 0) { auto text = row["text"]; MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } @@ -611,12 +629,15 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles4) { while (row.size() != 0) { auto text = row["text"]; MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } @@ -666,12 +687,15 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1A) { while (row.size() != 0) { auto text = row["text"]; MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } @@ -724,12 +748,15 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1B) { while (row.size() != 0) { auto text = row["text"]; MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } @@ -782,12 +809,15 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal4) { while (row.size() != 0) { auto text = 
row["text"]; MS_LOG(INFO) << "Tensor text shape: " << text.Shape(); - // std::string_view sv; - // text->GetItemAt(&sv, {0}); - // std::string ss(sv); - // MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); - // // Compare against expected result - // EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + std::shared_ptr de_text; + ASSERT_OK(Tensor::CreateFromMSTensor(text, &de_text)); + std::string_view sv; + de_text->GetItemAt(&sv, {0}); + std::string ss(sv); + MS_LOG(INFO) << "Text length: " << ss.length() << ", Text: " << ss.substr(0, 50); + // Compare against expected result + EXPECT_STREQ(ss.c_str(), expected_result[i].c_str()); + i++; iter->GetNextRow(&row); } diff --git a/tests/ut/cpp/dataset/c_api_dataset_tfrecord_test.cc b/tests/ut/cpp/dataset/c_api_dataset_tfrecord_test.cc index 7f03e939445..e15ac9ec021 100644 --- a/tests/ut/cpp/dataset/c_api_dataset_tfrecord_test.cc +++ b/tests/ut/cpp/dataset/c_api_dataset_tfrecord_test.cc @@ -148,12 +148,21 @@ TEST_F(MindDataTestPipeline, TestTFRecordDatasetShuffle) { iter2->GetNextRow(&row2); uint64_t i = 0; - // int64_t value1 = 0; - // int64_t value2 = 0; + int64_t value1 = 0; + int64_t value2 = 0; while (row1.size() != 0 && row2.size() != 0) { - // row1["scalars"]->GetItemAt(&value1, {0}); - // row2["scalars"]->GetItemAt(&value2, {0}); - // EXPECT_EQ(value1, value2); + auto scalars1 = row1["scalars"]; + std::shared_ptr de_scalars1; + ASSERT_OK(Tensor::CreateFromMSTensor(scalars1, &de_scalars1)); + de_scalars1->GetItemAt(&value1, {0}); + + auto scalars2 = row2["scalars"]; + std::shared_ptr de_scalars2; + ASSERT_OK(Tensor::CreateFromMSTensor(scalars2, &de_scalars2)); + de_scalars2->GetItemAt(&value2, {0}); + + EXPECT_EQ(value1, value2); + iter1->GetNextRow(&row1); iter2->GetNextRow(&row2); i++; @@ -191,17 +200,21 @@ TEST_F(MindDataTestPipeline, TestTFRecordDatasetShuffle2) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expect = {9, 3, 4, 7, 2, 1, 6, 8, 10, 5}; - // std::vector actual = {}; - // int64_t value = 0; + std::vector expect = {9, 3, 4, 7, 2, 1, 6, 8, 10, 5}; + std::vector actual = {}; + int64_t value = 0; uint64_t i = 0; while (row.size() != 0) { - // row["scalars"]->GetItemAt(&value, {}); - // actual.push_back(value); + auto scalars = row["scalars"]; + std::shared_ptr de_scalars; + ASSERT_OK(Tensor::CreateFromMSTensor(scalars, &de_scalars)); + de_scalars->GetItemAt(&value, {0}); + actual.push_back(value); + iter->GetNextRow(&row); i++; } - // ASSERT_EQ(actual, expect); + ASSERT_EQ(actual, expect); EXPECT_EQ(i, 10); // Manually terminate the pipeline iter->Stop(); diff --git a/tests/ut/cpp/dataset/c_api_datasets_test.cc b/tests/ut/cpp/dataset/c_api_datasets_test.cc index 954016aa54f..217588b63e4 100644 --- a/tests/ut/cpp/dataset/c_api_datasets_test.cc +++ b/tests/ut/cpp/dataset/c_api_datasets_test.cc @@ -222,7 +222,7 @@ TEST_F(MindDataTestPipeline, TestImageFolderFailWithWrongExtensionFail) { std::unordered_map row; iter->GetNextRow(&row); // Expect no data: cannot find files with specified extension - // EXPECT_EQ(row.size(), 0); + EXPECT_EQ(row.size(), 0); // Manually terminate the pipeline iter->Stop(); diff --git a/tests/ut/cpp/dataset/c_api_epoch_ctrl_test.cc b/tests/ut/cpp/dataset/c_api_epoch_ctrl_test.cc index eb213c9e913..f0129662337 100644 --- a/tests/ut/cpp/dataset/c_api_epoch_ctrl_test.cc +++ b/tests/ut/cpp/dataset/c_api_epoch_ctrl_test.cc @@ -26,10 +26,10 @@ class MindDataTestEpochCtrl : public UT::DatasetOpTesting { TEST_F(MindDataTestEpochCtrl, TestAutoInjectEpoch) { 
MS_LOG(INFO) << "Doing MindDataTestEpochCtrl-TestAutoInjectEpoch."; - // int32_t img_class[4] = {0, 1, 2, 3}; + int32_t img_class[4] = {0, 1, 2, 3}; int32_t num_epochs = 2 + std::rand() % 3; int32_t sampler_size = 44; - // int32_t class_size = 11; + int32_t class_size = 11; MS_LOG(INFO) << "num_epochs: " << num_epochs; // Create an ImageFolder Dataset @@ -50,10 +50,12 @@ TEST_F(MindDataTestEpochCtrl, TestAutoInjectEpoch) { iter->GetNextRow(&row); while (row.size() != 0) { - // auto label = row["label"]; - // int32_t label_value; - // label->GetItemAt(&label_value, {0}); - // EXPECT_TRUE(img_class[(i % sampler_size) / class_size] == label_value); + auto label = row["label"]; + std::shared_ptr de_label; + int64_t label_value; + ASSERT_OK(Tensor::CreateFromMSTensor(label, &de_label)); + de_label->GetItemAt(&label_value, {0}); + EXPECT_TRUE(img_class[(i % sampler_size) / class_size] == label_value); iter->GetNextRow(&row); i++; @@ -64,7 +66,7 @@ TEST_F(MindDataTestEpochCtrl, TestAutoInjectEpoch) { // Try to fetch data beyond the specified number of epochs. iter->GetNextRow(&row); - // EXPECT_EQ(row.size(), 2); + EXPECT_EQ(row.size(), 2); // Manually terminate the pipeline iter->Stop(); @@ -94,10 +96,12 @@ TEST_F(MindDataTestEpochCtrl, TestEpoch) { for (int epoch = 0; epoch < num_epochs; epoch++) { iter->GetNextRow(&row); while (row.size() != 0) { - // auto label = row["label"]; - // int32_t label_value; - // label->GetItemAt(&label_value, {0}); - // EXPECT_TRUE(label_value >= 0 && label_value <= 3); + auto label = row["label"]; + std::shared_ptr de_label; + int64_t label_value; + ASSERT_OK(Tensor::CreateFromMSTensor(label, &de_label)); + de_label->GetItemAt(&label_value, {0}); + EXPECT_TRUE(label_value >= 0 && label_value <= 3); iter->GetNextRow(&row); i++; @@ -109,7 +113,7 @@ TEST_F(MindDataTestEpochCtrl, TestEpoch) { // Try to fetch data beyond the specified number of epochs. iter->GetNextRow(&row); - // EXPECT_EQ(row.size(), 2); + EXPECT_EQ(row.size(), 2); // Manually terminate the pipeline iter->Stop(); @@ -141,10 +145,12 @@ TEST_F(MindDataTestEpochCtrl, TestRepeatEpoch) { for (int epoch = 0; epoch < num_epochs; epoch++) { iter->GetNextRow(&row); while (row.size() != 0) { - // auto label = row["label"]; - // int32_t label_value; - // label->GetItemAt(&label_value, {0}); - // EXPECT_TRUE(label_value >= 0 && label_value <= 3); + auto label = row["label"]; + std::shared_ptr de_label; + int64_t label_value; + ASSERT_OK(Tensor::CreateFromMSTensor(label, &de_label)); + de_label->GetItemAt(&label_value, {0}); + EXPECT_TRUE(label_value >= 0 && label_value <= 3); iter->GetNextRow(&row); i++; @@ -156,7 +162,7 @@ TEST_F(MindDataTestEpochCtrl, TestRepeatEpoch) { // Try to fetch data beyond the specified number of epochs. 
iter->GetNextRow(&row); - // EXPECT_EQ(row.size(), 2); + EXPECT_EQ(row.size(), 2); // Manually terminate the pipeline iter->Stop(); @@ -188,10 +194,12 @@ TEST_F(MindDataTestEpochCtrl, TestRepeatRepeatEpoch) { for (int epoch = 0; epoch < num_epochs; epoch++) { iter->GetNextRow(&row); while (row.size() != 0) { - // auto label = row["label"]; - // int32_t label_value; - // label->GetItemAt(&label_value, {0}); - // EXPECT_TRUE(label_value >= 0 && label_value <= 3); + auto label = row["label"]; + std::shared_ptr de_label; + int64_t label_value; + ASSERT_OK(Tensor::CreateFromMSTensor(label, &de_label)); + de_label->GetItemAt(&label_value, {0}); + EXPECT_TRUE(label_value >= 0 && label_value <= 3); iter->GetNextRow(&row); i++; @@ -203,7 +211,7 @@ TEST_F(MindDataTestEpochCtrl, TestRepeatRepeatEpoch) { // Try to fetch data beyond the specified number of epochs. iter->GetNextRow(&row); - // EXPECT_EQ(row.size(), 2); + EXPECT_EQ(row.size(), 2); // Manually terminate the pipeline iter->Stop(); diff --git a/tests/ut/cpp/dataset/c_api_text_sentence_piece_vocab_test.cc b/tests/ut/cpp/dataset/c_api_text_sentence_piece_vocab_test.cc index de1d3789fcb..0907a75b054 100644 --- a/tests/ut/cpp/dataset/c_api_text_sentence_piece_vocab_test.cc +++ b/tests/ut/cpp/dataset/c_api_text_sentence_piece_vocab_test.cc @@ -71,15 +71,19 @@ TEST_F(MindDataTestPipeline, TestSentencePieceVocabSuccess1) { iter->GetNextRow(&row); // Expected result after tokenization - // std::vector expected = {"▁I", "▁sa", "w", "▁a", "▁girl", "▁with", "▁a", "▁te", "les", "co", "pe", - // "."}; + std::vector expected = {"▁I", "▁sa", "w", "▁a", "▁girl", "▁with", "▁a", "▁te", "les", "co", "pe", "."}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + uint64_t i = 0; while (row.size() != 0) { - // auto txt = row["text"]; - // MS_LOG(INFO) << *txt; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*txt, *expected_tensor); + auto txt = row["text"]; + TEST_MS_LOG_MSTENSOR(INFO, "txt: ", txt); + + EXPECT_MSTENSOR_EQ(txt, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -127,15 +131,19 @@ TEST_F(MindDataTestPipeline, TestSentencePieceVocabSuccess2) { iter->GetNextRow(&row); // Expected result after tokenization - // std::vector expected = {"▁I", "▁sa", "w", "▁a", "▁girl", "▁with", "▁a", "▁te", "les", "co", "pe", - // "."}; + std::vector expected = {"▁I", "▁sa", "w", "▁a", "▁girl", "▁with", "▁a", "▁te", "les", "co", "pe", "."}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + uint64_t i = 0; while (row.size() != 0) { - // auto txt = row["text"]; - // MS_LOG(INFO) << *txt; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*txt, *expected_tensor); + auto txt = row["text"]; + TEST_MS_LOG_MSTENSOR(INFO, "txt: ", txt); + + EXPECT_MSTENSOR_EQ(txt, expected_tensor); + iter->GetNextRow(&row); i++; } diff --git a/tests/ut/cpp/dataset/c_api_text_test.cc b/tests/ut/cpp/dataset/c_api_text_test.cc index 053c94e3966..13414cbfdb1 100644 --- a/tests/ut/cpp/dataset/c_api_text_test.cc +++ b/tests/ut/cpp/dataset/c_api_text_test.cc @@ -65,24 +65,27 @@ TEST_F(MindDataTestPipeline, TestBasicTokenizerSuccess1) { 
std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // {"Welcome", "to", "Beijing", "北", "京", "欢", "迎", "您"}, - // {"長", "風", "破", "浪", "會", "有", "時", ",", "直", "掛", "雲", "帆", "濟", "滄", "海"}, - // {"😀", "嘿", "嘿", "😃", "哈", "哈", "😄", "大", "笑", "😁", "嘻", "嘻"}, - // {"明", "朝", "(", "1368", "—", "1644", "年", ")", "和", "清", "朝", "(", "1644", "—", "1911", "年", ")", - // ",", "是", "中", "国", "封", "建", "王", "朝", "史", "上", "最", "后", "两", "个", "朝", "代"}, - // {"明", "代", "(", "1368", "-", "1644", ")", "と", "清", "代", "(", "1644", - // "-", "1911", ")", "は", "、", "中", "国", "の", "封", "建", "王", "朝", - // "の", "歴", "史", "における", "最", "後", "の2つの", "王", "朝", "でした"}, - // {"명나라", "(", "1368", "-", "1644", ")", "와", "청나라", "(", "1644", "-", - // "1911", ")", "는", "중국", "봉건", "왕조의", "역사에서", "마지막", "두", "왕조였다"}}; + std::vector> expected = { + {"Welcome", "to", "Beijing", "北", "京", "欢", "迎", "您"}, + {"長", "風", "破", "浪", "會", "有", "時", ",", "直", "掛", "雲", "帆", "濟", "滄", "海"}, + {"😀", "嘿", "嘿", "😃", "哈", "哈", "😄", "大", "笑", "😁", "嘻", "嘻"}, + {"明", "朝", "(", "1368", "—", "1644", "年", ")", "和", "清", "朝", "(", "1644", "—", "1911", "年", ")", + ",", "是", "中", "国", "封", "建", "王", "朝", "史", "上", "最", "后", "两", "个", "朝", "代"}, + {"明", "代", "(", "1368", "-", "1644", ")", "と", "清", "代", "(", "1644", + "-", "1911", ")", "は", "、", "中", "国", "の", "封", "建", "王", "朝", + "の", "歴", "史", "における", "最", "後", "の2つの", "王", "朝", "でした"}, + {"명나라", "(", "1368", "-", "1644", ")", "와", "청나라", "(", "1644", "-", + "1911", ")", "는", "중국", "봉건", "왕조의", "역사에서", "마지막", "두", "왕조였다"}}; uint64_t i = 0; while (row.size() != 0) { auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected[i], &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -123,14 +126,16 @@ TEST_F(MindDataTestPipeline, TestBasicTokenizerSuccess2) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"this", "is", "a", "funky", "string"}; + std::vector expected = {"this", "is", "a", "funky", "string"}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -176,20 +181,32 @@ TEST_F(MindDataTestPipeline, TestBasicTokenizerSuccess3) { std::vector expected_offsets_start = {0, 5, 8, 10, 16}; std::vector expected_offsets_limit = {4, 7, 9, 15, 22}; + std::shared_ptr de_expected_tokens; + ASSERT_OK(Tensor::CreateFromVector(expected_tokens, &de_expected_tokens)); + mindspore::MSTensor ms_expected_tokens = + mindspore::MSTensor(std::make_shared(de_expected_tokens)); + + std::shared_ptr de_expected_offsets_start; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_start, &de_expected_offsets_start)); + mindspore::MSTensor ms_expected_offsets_start = + mindspore::MSTensor(std::make_shared(de_expected_offsets_start)); + + std::shared_ptr 
de_expected_offsets_limit; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_limit, &de_expected_offsets_limit)); + mindspore::MSTensor ms_expected_offsets_limit = + mindspore::MSTensor(std::make_shared(de_expected_offsets_limit)); + uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["token"]; - // mindspore::MSTensor expected_token_tensor; - // Tensor::CreateFromVector(expected_tokens, &expected_token_tensor); - // EXPECT_EQ(*ind, *expected_token_tensor); - // auto start = row["offsets_start"]; - // mindspore::MSTensor expected_start_tensor; - // Tensor::CreateFromVector(expected_offsets_start, &expected_start_tensor); - // EXPECT_EQ(*start, *expected_start_tensor); - // auto limit = row["offsets_limit"]; - // mindspore::MSTensor expected_limit_tensor; - // Tensor::CreateFromVector(expected_offsets_limit, &expected_limit_tensor); - // EXPECT_EQ(*limit, *expected_limit_tensor); + auto ind = row["token"]; + EXPECT_MSTENSOR_EQ(ind, ms_expected_tokens); + + auto start = row["offsets_start"]; + EXPECT_MSTENSOR_EQ(start, ms_expected_offsets_start); + + auto limit = row["offsets_limit"]; + EXPECT_MSTENSOR_EQ(limit, ms_expected_offsets_limit); + iter->GetNextRow(&row); i++; } @@ -242,17 +259,20 @@ TEST_F(MindDataTestPipeline, TestBertTokenizerSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = {{"床", "前", "明", "月", "光"}, - // {"疑", "是", "地", "上", "霜"}, - // {"举", "头", "望", "明", "月"}, - // {"低", "头", "思", "故", "乡"}}; + std::vector> expected = {{"床", "前", "明", "月", "光"}, + {"疑", "是", "地", "上", "霜"}, + {"举", "头", "望", "明", "月"}, + {"低", "头", "思", "故", "乡"}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected[i], &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -303,15 +323,17 @@ TEST_F(MindDataTestPipeline, TestBertTokenizerSuccess2) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"i", "am", "mak", "##ing", "small", "mistake", - // "##s", "during", "work", "##ing", "hour", "##s"}; + std::vector expected = {"i", "am", "mak", "##ing", "small", "mistake", + "##s", "during", "work", "##ing", "hour", "##s"}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -362,15 +384,18 @@ TEST_F(MindDataTestPipeline, TestBertTokenizerSuccess3) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // {"😀", "嘿", "嘿", "😃", "哈", "哈", "😄", "大", "笑", "😁", "嘻", "嘻"}, {"繁", "體", "字"}}; + std::vector> expected = { + {"😀", "嘿", "嘿", "😃", "哈", "哈", "😄", "大", "笑", "😁", "嘻", "嘻"}, {"繁", "體", "字"}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // 
Tensor::CreateFromVector(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected[i], &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -421,14 +446,16 @@ TEST_F(MindDataTestPipeline, TestBertTokenizerSuccess4) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"[UNK]", " ", "[CLS]"}; + std::vector expected = {"[UNK]", " ", "[CLS]"}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -479,14 +506,16 @@ TEST_F(MindDataTestPipeline, TestBertTokenizerSuccess5) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"unused", " ", "[CLS]"}; + std::vector expected = {"unused", " ", "[CLS]"}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -537,14 +566,16 @@ TEST_F(MindDataTestPipeline, TestBertTokenizerSuccess6) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"unused", " ", "[", "CLS", "]"}; + std::vector expected = {"unused", " ", "[", "CLS", "]"}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -595,25 +626,37 @@ TEST_F(MindDataTestPipeline, TestBertTokenizerSuccess7) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected_tokens = {"i", "am", "mak", "##ing", "small", "mistake", - // "##s", "during", "work", "##ing", "hour", "##s"}; - // std::vector expected_offsets_start = {0, 2, 5, 8, 12, 18, 25, 27, 34, 38, 42, 46}; - // std::vector expected_offsets_limit = {1, 4, 8, 11, 17, 25, 26, 33, 38, 41, 46, 47}; + std::vector expected_tokens = {"i", "am", "mak", "##ing", "small", "mistake", + "##s", "during", "work", "##ing", "hour", "##s"}; + std::vector expected_offsets_start = {0, 2, 5, 8, 12, 18, 25, 27, 34, 38, 42, 46}; + std::vector expected_offsets_limit = {1, 4, 8, 11, 17, 25, 26, 33, 38, 41, 46, 47}; + + std::shared_ptr de_expected_tokens; + ASSERT_OK(Tensor::CreateFromVector(expected_tokens, &de_expected_tokens)); 
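+  // The expected tensors are created once, outside the row loop, because every row is checked
+  // against the same tokens and offsets. The DE Tensor built above (and the offset tensors
+  // below) are wrapped as MSTensors so they can be compared with the iterator's "token",
+  // "offsets_start" and "offsets_limit" columns; the offsets are the start and (exclusive) end
+  // byte positions of each token within the original input string.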
+ mindspore::MSTensor ms_expected_tokens = + mindspore::MSTensor(std::make_shared(de_expected_tokens)); + + std::shared_ptr de_expected_offsets_start; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_start, &de_expected_offsets_start)); + mindspore::MSTensor ms_expected_offsets_start = + mindspore::MSTensor(std::make_shared(de_expected_offsets_start)); + + std::shared_ptr de_expected_offsets_limit; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_limit, &de_expected_offsets_limit)); + mindspore::MSTensor ms_expected_offsets_limit = + mindspore::MSTensor(std::make_shared(de_expected_offsets_limit)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["token"]; - // mindspore::MSTensor expected_token_tensor; - // Tensor::CreateFromVector(expected_tokens, &expected_token_tensor); - // EXPECT_EQ(*ind, *expected_token_tensor); - // auto start = row["offsets_start"]; - // mindspore::MSTensor expected_start_tensor; - // Tensor::CreateFromVector(expected_offsets_start, &expected_start_tensor); - // EXPECT_EQ(*start, *expected_start_tensor); - // auto limit = row["offsets_limit"]; - // mindspore::MSTensor expected_limit_tensor; - // Tensor::CreateFromVector(expected_offsets_limit, &expected_limit_tensor); - // EXPECT_EQ(*limit, *expected_limit_tensor); + auto ind = row["token"]; + EXPECT_MSTENSOR_EQ(ind, ms_expected_tokens); + + auto start = row["offsets_start"]; + EXPECT_MSTENSOR_EQ(start, ms_expected_offsets_start); + + auto limit = row["offsets_limit"]; + EXPECT_MSTENSOR_EQ(limit, ms_expected_offsets_limit); + iter->GetNextRow(&row); i++; } @@ -698,14 +741,16 @@ TEST_F(MindDataTestPipeline, TestCaseFoldSuccess) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"welcome to beijing!", "北京欢迎您!", "我喜欢english!", " "}; + std::vector expected = {"welcome to beijing!", "北京欢迎您!", "我喜欢english!", " "}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateScalar(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_tensor)); + mindspore::MSTensor ms_expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_tensor); iter->GetNextRow(&row); i++; } @@ -745,14 +790,16 @@ TEST_F(MindDataTestPipeline, TestJiebaTokenizerSuccess) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"今天天气", "太好了", "我们", "一起", "去", "外面", "玩吧"}; + std::vector expected = {"今天天气", "太好了", "我们", "一起", "去", "外面", "玩吧"}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -792,14 +839,16 @@ TEST_F(MindDataTestPipeline, TestJiebaTokenizerSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"今天", "天气", "太", "好", "了", "我们", "一起", "去", "外面", "玩", "吧"}; + std::vector expected = {"今天", "天气", "太", "好", "了", "我们", "一起", "去", "外面", "玩", "吧"}; + std::shared_ptr de_expected_tensor; + 
ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -840,25 +889,36 @@ TEST_F(MindDataTestPipeline, TestJiebaTokenizerSuccess2) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"今天天气", "太好了", "我们", "一起", "去", "外面", "玩吧"}; + std::vector expected_tokens = {"今天天气", "太好了", "我们", "一起", "去", "外面", "玩吧"}; + std::vector expected_offsets_start = {0, 12, 21, 27, 33, 36, 42}; + std::vector expected_offsets_limit = {12, 21, 27, 33, 36, 42, 48}; + + std::shared_ptr de_expected_tokens; + ASSERT_OK(Tensor::CreateFromVector(expected_tokens, &de_expected_tokens)); + mindspore::MSTensor ms_expected_tokens = + mindspore::MSTensor(std::make_shared(de_expected_tokens)); - // std::vector expected_offsets_start = {0, 12, 21, 27, 33, 36, 42}; - // std::vector expected_offsets_limit = {12, 21, 27, 33, 36, 42, 48}; + std::shared_ptr de_expected_offsets_start; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_start, &de_expected_offsets_start)); + mindspore::MSTensor ms_expected_offsets_start = + mindspore::MSTensor(std::make_shared(de_expected_offsets_start)); + + std::shared_ptr de_expected_offsets_limit; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_limit, &de_expected_offsets_limit)); + mindspore::MSTensor ms_expected_offsets_limit = + mindspore::MSTensor(std::make_shared(de_expected_offsets_limit)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["offsets_start"]; - // auto ind1 = row["offsets_limit"]; - // auto token = row["token"]; - // mindspore::MSTensor expected_tensor; - // mindspore::MSTensor expected_tensor_offsets_start; - // mindspore::MSTensor expected_tensor_offsets_limit; - // Tensor::CreateFromVector(expected, &expected_tensor); - // Tensor::CreateFromVector(expected_offsets_start, &expected_tensor_offsets_start); - // Tensor::CreateFromVector(expected_offsets_limit, &expected_tensor_offsets_limit); - // EXPECT_EQ(*ind, *expected_tensor_offsets_start); - // EXPECT_EQ(*ind1, *expected_tensor_offsets_limit); - // EXPECT_EQ(*token, *expected_tensor); + auto ind = row["token"]; + EXPECT_MSTENSOR_EQ(ind, ms_expected_tokens); + + auto start = row["offsets_start"]; + EXPECT_MSTENSOR_EQ(start, ms_expected_offsets_start); + + auto limit = row["offsets_limit"]; + EXPECT_MSTENSOR_EQ(limit, ms_expected_offsets_limit); + iter->GetNextRow(&row); i++; } @@ -1003,14 +1063,16 @@ TEST_F(MindDataTestPipeline, TestJiebaTokenizerAddWord) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"男默女泪", "市", "长江大桥"}; + std::vector expected = {"男默女泪", "市", "长江大桥"}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -1053,14 +1115,16 @@ 
TEST_F(MindDataTestPipeline, TestJiebaTokenizerAddWord1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"男默女泪", "市", "长江大桥"}; + std::vector expected = {"男默女泪", "市", "长江大桥"}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -1103,14 +1167,16 @@ TEST_F(MindDataTestPipeline, TestJiebaTokenizerAddWord2) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"男默女泪", "市", "长江大桥"}; + std::vector expected = {"男默女泪", "市", "长江大桥"}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -1153,14 +1219,16 @@ TEST_F(MindDataTestPipeline, TestJiebaTokenizerAddWord3) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"江州", "市长", "江大桥", "参加", "了", "长江大桥", "的", "通车", "仪式"}; + std::vector expected = {"江州", "市长", "江大桥", "参加", "了", "长江大桥", "的", "通车", "仪式"}; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateFromVector(expected, &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateFromVector(expected, &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + EXPECT_MSTENSOR_EQ(ind, expected_tensor); iter->GetNextRow(&row); i++; } @@ -1223,17 +1291,21 @@ TEST_F(MindDataTestPipeline, TestSlidingWindowSuccess) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = {{"This", "is", "a", "is", "a", "text", "a", "text", "file."}, - // {"Be", "happy", "every", "happy", "every", "day."}, - // {"Good", "luck", "to", "luck", "to", "everyone."}}; + std::vector> expected = {{"This", "is", "a", "is", "a", "text", "a", "text", "file."}, + {"Be", "happy", "every", "happy", "every", "day."}, + {"Good", "luck", "to", "luck", "to", "everyone."}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // int x = expected[i].size() / 3; - // Tensor::CreateFromVector(expected[i], TensorShape({x, 3}), &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + + std::shared_ptr de_expected_tensor; + int x = expected[i].size() / 3; + ASSERT_OK(Tensor::CreateFromVector(expected[i], TensorShape({x, 3}), &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -1273,16 +1345,20 @@ TEST_F(MindDataTestPipeline, 
TestSlidingWindowSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = {{"This", "is", "is", "a", "a", "text", "text", "file."}, - // {"Be", "happy", "happy", "every", "every", "day."}, - // {"Good", "luck", "luck", "to", "to", "everyone."}}; + std::vector> expected = {{"This", "is", "is", "a", "a", "text", "text", "file."}, + {"Be", "happy", "happy", "every", "every", "day."}, + {"Good", "luck", "luck", "to", "to", "everyone."}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // int x = expected[i].size() / 2; - // Tensor::CreateFromVector(expected[i], TensorShape({x, 2}), &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + + std::shared_ptr de_expected_tensor; + int x = expected[i].size() / 2; + ASSERT_OK(Tensor::CreateFromVector(expected[i], TensorShape({x, 2}), &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -1372,14 +1448,16 @@ TEST_F(MindDataTestPipeline, TestToNumberSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {-121, 14, -2219, 7623, -8162536, 162371864, -1726483716, 98921728421}; + std::vector expected = {-121, 14, -2219, 7623, -8162536, 162371864, -1726483716, 98921728421}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateScalar(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_tensor)); + mindspore::MSTensor ms_expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_tensor); iter->GetNextRow(&row); i++; } @@ -1425,14 +1503,16 @@ TEST_F(MindDataTestPipeline, TestToNumberSuccess2) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {-1.1, 1.4, -2219.321, 7623.453, -816256.234282, 162371864.243243}; + std::vector expected = {-1.1, 1.4, -2219.321, 7623.453, -816256.234282, 162371864.243243}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateScalar(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_tensor)); + mindspore::MSTensor ms_expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_tensor); iter->GetNextRow(&row); i++; } @@ -1673,20 +1753,27 @@ TEST_F(MindDataTestPipeline, TestTruncateSequencePairSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected1 = {{-29556, -29556}, {-18505, -18505}, {-25958, -25958}}; - // std::vector> expected2 = { - // {-1751672937, -1751672937}, {-656877352, -656877352}, {-606348325, -606348325}}; + std::vector> expected1 = {{-29556, -29556}, {-18505, -18505}, {-25958, -25958}}; + std::vector> expected2 = { + {-1751672937, -1751672937}, {-656877352, -656877352}, {-606348325, -606348325}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind1 = row["col1"]; - // auto ind2 = row["col2"]; - // mindspore::MSTensor expected_tensor1; - // mindspore::MSTensor expected_tensor2; - // 
Tensor::CreateFromVector(expected1[i], &expected_tensor1); - // Tensor::CreateFromVector(expected2[i], &expected_tensor2); - // EXPECT_EQ(*ind1, *expected_tensor1); - // EXPECT_EQ(*ind2, *expected_tensor2); + auto ind1 = row["col1"]; + auto ind2 = row["col2"]; + + std::shared_ptr de_expected_tensor1; + ASSERT_OK(Tensor::CreateFromVector(expected1[i], &de_expected_tensor1)); + mindspore::MSTensor expected_tensor1 = + mindspore::MSTensor(std::make_shared(de_expected_tensor1)); + EXPECT_MSTENSOR_EQ(ind1, expected_tensor1); + + std::shared_ptr de_expected_tensor2; + ASSERT_OK(Tensor::CreateFromVector(expected2[i], &de_expected_tensor2)); + mindspore::MSTensor expected_tensor2 = + mindspore::MSTensor(std::make_shared(de_expected_tensor2)); + EXPECT_MSTENSOR_EQ(ind2, expected_tensor2); + iter->GetNextRow(&row); i++; } @@ -1741,23 +1828,30 @@ TEST_F(MindDataTestPipeline, TestTruncateSequencePairSuccess2) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected1 = {{1785358954, 1785358954, 1785358954}, - // {-1195853640, -1195853640, -1195853640}, - // {0, 0, 0}, - // {1296911693, 1296911693, 1296911693}}; - // std::vector> expected2 = { - // {-1, -1}, {-1229782938247303442, -1229782938247303442}, {2314885530818453536, 2314885530818453536}, {-1, -1}}; + std::vector> expected1 = {{1785358954, 1785358954, 1785358954}, + {-1195853640, -1195853640, -1195853640}, + {0, 0, 0}, + {1296911693, 1296911693, 1296911693}}; + std::vector> expected2 = { + {-1, -1}, {-1229782938247303442, -1229782938247303442}, {2314885530818453536, 2314885530818453536}, {-1, -1}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind1 = row["col1"]; - // auto ind2 = row["col2"]; - // mindspore::MSTensor expected_tensor1; - // mindspore::MSTensor expected_tensor2; - // Tensor::CreateFromVector(expected1[i], &expected_tensor1); - // Tensor::CreateFromVector(expected2[i], &expected_tensor2); - // EXPECT_EQ(*ind1, *expected_tensor1); - // EXPECT_EQ(*ind2, *expected_tensor2); + auto ind1 = row["col1"]; + auto ind2 = row["col2"]; + + std::shared_ptr de_expected_tensor1; + ASSERT_OK(Tensor::CreateFromVector(expected1[i], &de_expected_tensor1)); + mindspore::MSTensor expected_tensor1 = + mindspore::MSTensor(std::make_shared(de_expected_tensor1)); + EXPECT_MSTENSOR_EQ(ind1, expected_tensor1); + + std::shared_ptr de_expected_tensor2; + ASSERT_OK(Tensor::CreateFromVector(expected2[i], &de_expected_tensor2)); + mindspore::MSTensor expected_tensor2 = + mindspore::MSTensor(std::make_shared(de_expected_tensor2)); + EXPECT_MSTENSOR_EQ(ind2, expected_tensor2); + iter->GetNextRow(&row); i++; } @@ -1827,18 +1921,21 @@ TEST_F(MindDataTestPipeline, TestNgramSuccess) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = {{"_ This", "This is", "is a", "a text", "text file.", "file. _"}, - // {"_ Be", "Be happy", "happy every", "every day.", "day. _"}, - // {"_ Good", "Good luck", "luck to", "to everyone.", "everyone. - // _"}}; + std::vector> expected = {{"_ This", "This is", "is a", "a text", "text file.", "file. _"}, + {"_ Be", "Be happy", "happy every", "every day.", "day. _"}, + {"_ Good", "Good luck", "luck to", "to everyone.", "everyone. 
_"}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + + std::shared_ptr de_expected_tensor; + int x = expected[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected[i], TensorShape({x}), &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -1877,22 +1974,26 @@ TEST_F(MindDataTestPipeline, TestNgramSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // {"&-This", "This-is", "is-a", "a-text", "text-file.", "file.-&", "&-&-This", "&-This-is", "This-is-a", - // "is-a-text", - // "a-text-file.", "text-file.-&", "file.-&-&"}, - // {"&-Be", "Be-happy", "happy-every", "every-day.", "day.-&", "&-&-Be", "&-Be-happy", "Be-happy-every", - // "happy-every-day.", "every-day.-&", "day.-&-&"}, - // {"&-Good", "Good-luck", "luck-to", "to-everyone.", "everyone.-&", "&-&-Good", "&-Good-luck", "Good-luck-to", - // "luck-to-everyone.", "to-everyone.-&", "everyone.-&-&"}}; + std::vector> expected = { + {"&-This", "This-is", "is-a", "a-text", "text-file.", "file.-&", "&-&-This", "&-This-is", "This-is-a", + "is-a-text", + "a-text-file.", "text-file.-&", "file.-&-&"}, + {"&-Be", "Be-happy", "happy-every", "every-day.", "day.-&", "&-&-Be", "&-Be-happy", "Be-happy-every", + "happy-every-day.", "every-day.-&", "day.-&-&"}, + {"&-Good", "Good-luck", "luck-to", "to-everyone.", "everyone.-&", "&-&-Good", "&-Good-luck", "Good-luck-to", + "luck-to-everyone.", "to-everyone.-&", "everyone.-&-&"}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + + std::shared_ptr de_expected_tensor; + int x = expected[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected[i], TensorShape({x}), &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -2044,14 +2145,16 @@ TEST_F(MindDataTestPipeline, TestNormalizeUTF8Success) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"ṩ", "ḍ̇", "q̣̇", "fi", "25", "ṩ"}; + std::vector expected = {"ṩ", "ḍ̇", "q̣̇", "fi", "25", "ṩ"}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateScalar(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_tensor)); + mindspore::MSTensor ms_expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_tensor); iter->GetNextRow(&row); i++; } @@ -2088,14 +2191,16 @@ TEST_F(MindDataTestPipeline, TestNormalizeUTF8Success1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"ṩ", "ḍ̇", "q̣̇", "fi", "2⁵", "ẛ̣"}; + std::vector expected = {"ṩ", "ḍ̇", "q̣̇", "fi", "2⁵", "ẛ̣"}; uint64_t i = 0; while (row.size() != 0) { - // auto ind 
= row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateScalar(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_tensor)); + mindspore::MSTensor ms_expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_tensor); iter->GetNextRow(&row); i++; } @@ -2132,14 +2237,16 @@ TEST_F(MindDataTestPipeline, TestNormalizeUTF8Success2) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"ṩ", "ḍ̇", "q̣̇", "fi", "2⁵", "ẛ̣"}; + std::vector expected = {"ṩ", "ḍ̇", "q̣̇", "fi", "2⁵", "ẛ̣"}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateScalar(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_tensor)); + mindspore::MSTensor ms_expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_tensor); iter->GetNextRow(&row); i++; } @@ -2176,14 +2283,16 @@ TEST_F(MindDataTestPipeline, TestNormalizeUTF8Success3) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"ṩ", "ḍ̇", "q̣̇", "fi", "25", "ṩ"}; + std::vector expected = {"ṩ", "ḍ̇", "q̣̇", "fi", "25", "ṩ"}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateScalar(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_tensor)); + mindspore::MSTensor ms_expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_tensor); iter->GetNextRow(&row); i++; } @@ -2220,15 +2329,17 @@ TEST_F(MindDataTestPipeline, TestRegexReplaceSuccess) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"Hello_World", "Let's_Go", "1:hello", "2:world", - // "31:beijing", "Welcome_to_China!", "_我_不想_长大_", "Welcome_to_Shenzhen!"}; + std::vector expected = {"Hello_World", "Let's_Go", "1:hello", "2:world", + "31:beijing", "Welcome_to_China!", "_我_不想_长大_", "Welcome_to_Shenzhen!"}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // Tensor::CreateScalar(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_tensor)); + mindspore::MSTensor ms_expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_tensor); iter->GetNextRow(&row); i++; } @@ -2265,16 +2376,17 @@ TEST_F(MindDataTestPipeline, TestRegexReplaceSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector expected = {"Hello_World", "Let's_Go", "1:hello", "2:world", - // "31:beijing", "Welcome_to China!", "_我 不想 长大 ", "Welcome_to - // Shenzhen!"}; + std::vector expected = {"Hello_World", "Let's_Go", "1:hello", "2:world", + "31:beijing", "Welcome_to China!", "_我 不想 长大 ", "Welcome_to Shenzhen!"}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - 
// Tensor::CreateScalar(expected[i], &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + std::shared_ptr de_expected_tensor; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_tensor)); + mindspore::MSTensor ms_expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_tensor); iter->GetNextRow(&row); i++; } @@ -2311,22 +2423,26 @@ TEST_F(MindDataTestPipeline, TestRegexTokenizerSuccess) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = {{"Hello", " ", "World"}, - // {"Let's", " ", "Go"}, - // {"1:hello"}, - // {"2:world"}, - // {"31:beijing"}, - // {"Welcome", " ", "to", " ", "China!"}, - // {" ", "我", " ", "不想", " ", "长大", " "}, - // {"Welcome", " ", "to", " ", "Shenzhen!"}}; + std::vector> expected = {{"Hello", " ", "World"}, + {"Let's", " ", "Go"}, + {"1:hello"}, + {"2:world"}, + {"31:beijing"}, + {"Welcome", " ", "to", " ", "China!"}, + {" ", "我", " ", "不想", " ", "长大", " "}, + {"Welcome", " ", "to", " ", "Shenzhen!"}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + + std::shared_ptr de_expected_tensor; + int x = expected[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected[i], TensorShape({x}), &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -2364,35 +2480,44 @@ TEST_F(MindDataTestPipeline, TestRegexTokenizerSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = {{"Hello", " ", "World"}, - // {"Let's", " ", "Go"}, - // {"1:hello"}, - // {"2:world"}, - // {"31:beijing"}, - // {"Welcome", " ", "to", " ", "China!"}, - // {" ", "我", " ", "不想", " ", "长大", " "}, - // {"Welcome", " ", "to", " ", "Shenzhen!"}}; + std::vector> expected_tokens = {{"Hello", " ", "World"}, + {"Let's", " ", "Go"}, + {"1:hello"}, + {"2:world"}, + {"31:beijing"}, + {"Welcome", " ", "to", " ", "China!"}, + {" ", "我", " ", "不想", " ", "长大", " "}, + {"Welcome", " ", "to", " ", "Shenzhen!"}}; - // std::vector> expected_offsets_start = { - // {0, 5, 6}, {0, 5, 6}, {0}, {0}, {0}, {0, 7, 8, 10, 11}, {0, 2, 5, 6, 12, 14, 20}, {0, 7, 8, 10, 11}}; - // std::vector> expected_offsets_limit = { - // {5, 6, 11}, {5, 6, 8}, {7}, {7}, {10}, {7, 8, 10, 11, 17}, {2, 5, 6, 12, 14, 20, 21}, {7, 8, 10, 11, 20}}; + std::vector> expected_offsets_start = { + {0, 5, 6}, {0, 5, 6}, {0}, {0}, {0}, {0, 7, 8, 10, 11}, {0, 2, 5, 6, 12, 14, 20}, {0, 7, 8, 10, 11}}; + std::vector> expected_offsets_limit = { + {5, 6, 11}, {5, 6, 8}, {7}, {7}, {10}, {7, 8, 10, 11, 17}, {2, 5, 6, 12, 14, 20, 21}, {7, 8, 10, 11, 20}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["offsets_start"]; - // auto ind1 = row["offsets_limit"]; - // auto token = row["token"]; - // mindspore::MSTensor expected_tensor; - // mindspore::MSTensor expected_tensor_offsets_start; - // mindspore::MSTensor expected_tensor_offsets_limit; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // Tensor::CreateFromVector(expected_offsets_start[i], TensorShape({x}), &expected_tensor_offsets_start); - // Tensor::CreateFromVector(expected_offsets_limit[i], 
TensorShape({x}), &expected_tensor_offsets_limit); - // EXPECT_EQ(*ind, *expected_tensor_offsets_start); - // EXPECT_EQ(*ind1, *expected_tensor_offsets_limit); - // EXPECT_EQ(*token, *expected_tensor); + auto token = row["token"]; + auto start = row["offsets_start"]; + auto limit = row["offsets_limit"]; + + std::shared_ptr de_expected_tokens; + int x = expected_tokens[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected_tokens[i], TensorShape({x}), &de_expected_tokens)); + mindspore::MSTensor ms_expected_tokens = + mindspore::MSTensor(std::make_shared(de_expected_tokens)); + EXPECT_MSTENSOR_EQ(token, ms_expected_tokens); + + std::shared_ptr de_expected_offsets_start; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_start[i], TensorShape({x}), &de_expected_offsets_start)); + mindspore::MSTensor ms_expected_offsets_start = + mindspore::MSTensor(std::make_shared(de_expected_offsets_start)); + EXPECT_MSTENSOR_EQ(start, ms_expected_offsets_start); + + std::shared_ptr de_expected_offsets_limit; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_limit[i], TensorShape({x}), &de_expected_offsets_limit)); + mindspore::MSTensor ms_expected_offsets_limit = + mindspore::MSTensor(std::make_shared(de_expected_offsets_limit)); + EXPECT_MSTENSOR_EQ(limit, ms_expected_offsets_limit); iter->GetNextRow(&row); i++; @@ -2430,19 +2555,23 @@ TEST_F(MindDataTestPipeline, TestUnicodeCharTokenizerSuccess) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // {"W", "e", "l", "c", "o", "m", "e", " ", "t", "o", " ", "B", "e", "i", "j", "i", "n", "g", "!"}, - // {"北", "京", "欢", "迎", "您", "!"}, - // {"我", "喜", "欢", "E", "n", "g", "l", "i", "s", "h", "!"}, - // {" ", " "}}; + std::vector> expected = { + {"W", "e", "l", "c", "o", "m", "e", " ", "t", "o", " ", "B", "e", "i", "j", "i", "n", "g", "!"}, + {"北", "京", "欢", "迎", "您", "!"}, + {"我", "喜", "欢", "E", "n", "g", "l", "i", "s", "h", "!"}, + {" ", " "}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + + std::shared_ptr de_expected_tensor; + int x = expected[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected[i], TensorShape({x}), &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -2480,38 +2609,48 @@ TEST_F(MindDataTestPipeline, TestUnicodeCharTokenizerSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // {"W", "e", "l", "c", "o", "m", "e", " ", "t", "o", " ", "B", "e", "i", "j", "i", "n", "g", "!"}, - // {"北", "京", "欢", "迎", "您", "!"}, - // {"我", "喜", "欢", "E", "n", "g", "l", "i", "s", "h", "!"}, - // {" ", " "}}; + std::vector> expected_tokens = { + {"W", "e", "l", "c", "o", "m", "e", " ", "t", "o", " ", "B", "e", "i", "j", "i", "n", "g", "!"}, + {"北", "京", "欢", "迎", "您", "!"}, + {"我", "喜", "欢", "E", "n", "g", "l", "i", "s", "h", "!"}, + {" ", " "}}; - // std::vector> expected_offsets_start = { - // {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18}, - // {0, 3, 6, 9, 12, 15}, - // {0, 3, 6, 9, 10, 11, 12, 13, 14, 15, 16}, - // {0, 1}}; - // std::vector> expected_offsets_limit = { - // {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19}, - // {3, 6, 9, 12, 
15, 18}, - // {3, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17}, - // {1, 2}}; + std::vector> expected_offsets_start = { + {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18}, + {0, 3, 6, 9, 12, 15}, + {0, 3, 6, 9, 10, 11, 12, 13, 14, 15, 16}, + {0, 1}}; + + std::vector> expected_offsets_limit = { + {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19}, + {3, 6, 9, 12, 15, 18}, + {3, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17}, + {1, 2}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["offsets_start"]; - // auto ind1 = row["offsets_limit"]; - // auto token = row["token"]; - // mindspore::MSTensor expected_tensor; - // mindspore::MSTensor expected_tensor_offsets_start; - // mindspore::MSTensor expected_tensor_offsets_limit; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // Tensor::CreateFromVector(expected_offsets_start[i], TensorShape({x}), &expected_tensor_offsets_start); - // Tensor::CreateFromVector(expected_offsets_limit[i], TensorShape({x}), &expected_tensor_offsets_limit); - // EXPECT_EQ(*ind, *expected_tensor_offsets_start); - // EXPECT_EQ(*ind1, *expected_tensor_offsets_limit); - // EXPECT_EQ(*token, *expected_tensor); + auto token = row["token"]; + auto start = row["offsets_start"]; + auto limit = row["offsets_limit"]; + + std::shared_ptr de_expected_tokens; + int x = expected_tokens[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected_tokens[i], TensorShape({x}), &de_expected_tokens)); + mindspore::MSTensor ms_expected_tokens = + mindspore::MSTensor(std::make_shared(de_expected_tokens)); + EXPECT_MSTENSOR_EQ(token, ms_expected_tokens); + + std::shared_ptr de_expected_offsets_start; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_start[i], TensorShape({x}), &de_expected_offsets_start)); + mindspore::MSTensor ms_expected_offsets_start = + mindspore::MSTensor(std::make_shared(de_expected_offsets_start)); + EXPECT_MSTENSOR_EQ(start, ms_expected_offsets_start); + + std::shared_ptr de_expected_offsets_limit; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_limit[i], TensorShape({x}), &de_expected_offsets_limit)); + mindspore::MSTensor ms_expected_offsets_limit = + mindspore::MSTensor(std::make_shared(de_expected_offsets_limit)); + EXPECT_MSTENSOR_EQ(limit, ms_expected_offsets_limit); iter->GetNextRow(&row); i++; @@ -2549,16 +2688,20 @@ TEST_F(MindDataTestPipeline, TestUnicodeScriptTokenizerSuccess) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // {"Welcome", "to", "Beijing", "!"}, {"北京欢迎您", "!"}, {"我喜欢", "English", "!"}, {""}}; + std::vector> expected = { + {"Welcome", "to", "Beijing", "!"}, {"北京欢迎您", "!"}, {"我喜欢", "English", "!"}, {""}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + + std::shared_ptr de_expected_tensor; + int x = expected[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected[i], TensorShape({x}), &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -2596,16 +2739,20 @@ TEST_F(MindDataTestPipeline, TestUnicodeScriptTokenizerSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // 
{"Welcome", " ", "to", " ", "Beijing", "!"}, {"北京欢迎您", "!"}, {"我喜欢", "English", "!"}, {" "}}; + std::vector> expected = { + {"Welcome", " ", "to", " ", "Beijing", "!"}, {"北京欢迎您", "!"}, {"我喜欢", "English", "!"}, {" "}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + + std::shared_ptr de_expected_tensor; + int x = expected[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected[i], TensorShape({x}), &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -2645,27 +2792,36 @@ TEST_F(MindDataTestPipeline, TestUnicodeScriptTokenizerSuccess2) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // {"Welcome", "to", "Beijing", "!"}, {"北京欢迎您", "!"}, {"我喜欢", "English", "!"}, {""}}; + std::vector> expected_tokens = { + {"Welcome", "to", "Beijing", "!"}, {"北京欢迎您", "!"}, {"我喜欢", "English", "!"}, {""}}; - // std::vector> expected_offsets_start = {{0, 8, 11, 18}, {0, 15}, {0, 9, 16}, {0}}; - // std::vector> expected_offsets_limit = {{7, 10, 18, 19}, {15, 18}, {9, 16, 17}, {0}}; + std::vector> expected_offsets_start = {{0, 8, 11, 18}, {0, 15}, {0, 9, 16}, {0}}; + std::vector> expected_offsets_limit = {{7, 10, 18, 19}, {15, 18}, {9, 16, 17}, {0}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["offsets_start"]; - // auto ind1 = row["offsets_limit"]; - // auto token = row["token"]; - // mindspore::MSTensor expected_tensor; - // mindspore::MSTensor expected_tensor_offsets_start; - // mindspore::MSTensor expected_tensor_offsets_limit; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // Tensor::CreateFromVector(expected_offsets_start[i], TensorShape({x}), &expected_tensor_offsets_start); - // Tensor::CreateFromVector(expected_offsets_limit[i], TensorShape({x}), &expected_tensor_offsets_limit); - // EXPECT_EQ(*ind, *expected_tensor_offsets_start); - // EXPECT_EQ(*ind1, *expected_tensor_offsets_limit); - // EXPECT_EQ(*token, *expected_tensor); + auto token = row["token"]; + auto start = row["offsets_start"]; + auto limit = row["offsets_limit"]; + + std::shared_ptr de_expected_tokens; + int x = expected_tokens[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected_tokens[i], TensorShape({x}), &de_expected_tokens)); + mindspore::MSTensor ms_expected_tokens = + mindspore::MSTensor(std::make_shared(de_expected_tokens)); + EXPECT_MSTENSOR_EQ(token, ms_expected_tokens); + + std::shared_ptr de_expected_offsets_start; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_start[i], TensorShape({x}), &de_expected_offsets_start)); + mindspore::MSTensor ms_expected_offsets_start = + mindspore::MSTensor(std::make_shared(de_expected_offsets_start)); + EXPECT_MSTENSOR_EQ(start, ms_expected_offsets_start); + + std::shared_ptr de_expected_offsets_limit; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_limit[i], TensorShape({x}), &de_expected_offsets_limit)); + mindspore::MSTensor ms_expected_offsets_limit = + mindspore::MSTensor(std::make_shared(de_expected_offsets_limit)); + EXPECT_MSTENSOR_EQ(limit, ms_expected_offsets_limit); iter->GetNextRow(&row); i++; @@ -2705,27 +2861,36 @@ TEST_F(MindDataTestPipeline, 
TestUnicodeScriptTokenizerSuccess3) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // {"Welcome", " ", "to", " ", "Beijing", "!"}, {"北京欢迎您", "!"}, {"我喜欢", "English", "!"}, {" "}}; + std::vector> expected_tokens = { + {"Welcome", " ", "to", " ", "Beijing", "!"}, {"北京欢迎您", "!"}, {"我喜欢", "English", "!"}, {" "}}; - // std::vector> expected_offsets_start = {{0, 7, 8, 10, 11, 18}, {0, 15}, {0, 9, 16}, {0}}; - // std::vector> expected_offsets_limit = {{7, 8, 10, 11, 18, 19}, {15, 18}, {9, 16, 17}, {2}}; + std::vector> expected_offsets_start = {{0, 7, 8, 10, 11, 18}, {0, 15}, {0, 9, 16}, {0}}; + std::vector> expected_offsets_limit = {{7, 8, 10, 11, 18, 19}, {15, 18}, {9, 16, 17}, {2}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["offsets_start"]; - // auto ind1 = row["offsets_limit"]; - // auto token = row["token"]; - // mindspore::MSTensor expected_tensor; - // mindspore::MSTensor expected_tensor_offsets_start; - // mindspore::MSTensor expected_tensor_offsets_limit; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // Tensor::CreateFromVector(expected_offsets_start[i], TensorShape({x}), &expected_tensor_offsets_start); - // Tensor::CreateFromVector(expected_offsets_limit[i], TensorShape({x}), &expected_tensor_offsets_limit); - // EXPECT_EQ(*ind, *expected_tensor_offsets_start); - // EXPECT_EQ(*ind1, *expected_tensor_offsets_limit); - // EXPECT_EQ(*token, *expected_tensor); + auto token = row["token"]; + auto start = row["offsets_start"]; + auto limit = row["offsets_limit"]; + + std::shared_ptr de_expected_tokens; + int x = expected_tokens[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected_tokens[i], TensorShape({x}), &de_expected_tokens)); + mindspore::MSTensor ms_expected_tokens = + mindspore::MSTensor(std::make_shared(de_expected_tokens)); + EXPECT_MSTENSOR_EQ(token, ms_expected_tokens); + + std::shared_ptr de_expected_offsets_start; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_start[i], TensorShape({x}), &de_expected_offsets_start)); + mindspore::MSTensor ms_expected_offsets_start = + mindspore::MSTensor(std::make_shared(de_expected_offsets_start)); + EXPECT_MSTENSOR_EQ(start, ms_expected_offsets_start); + + std::shared_ptr de_expected_offsets_limit; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_limit[i], TensorShape({x}), &de_expected_offsets_limit)); + mindspore::MSTensor ms_expected_offsets_limit = + mindspore::MSTensor(std::make_shared(de_expected_offsets_limit)); + EXPECT_MSTENSOR_EQ(limit, ms_expected_offsets_limit); iter->GetNextRow(&row); i++; @@ -2763,16 +2928,20 @@ TEST_F(MindDataTestPipeline, TestWhitespaceTokenizerSuccess) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // {"This", "is", "a", "text", "file."}, {"Be", "happy", "every", "day."}, {"Good", "luck", "to", "everyone."}}; + std::vector> expected = { + {"This", "is", "a", "text", "file."}, {"Be", "happy", "every", "day."}, {"Good", "luck", "to", "everyone."}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["text"]; - // mindspore::MSTensor expected_tensor; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // EXPECT_EQ(*ind, *expected_tensor); + auto ind = row["text"]; + + std::shared_ptr de_expected_tensor; + int x = expected[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected[i], TensorShape({x}), &de_expected_tensor)); + mindspore::MSTensor expected_tensor = + 
mindspore::MSTensor(std::make_shared(de_expected_tensor)); + EXPECT_MSTENSOR_EQ(ind, expected_tensor); + iter->GetNextRow(&row); i++; } @@ -2810,27 +2979,36 @@ TEST_F(MindDataTestPipeline, TestWhitespaceTokenizerSuccess1) { std::unordered_map row; iter->GetNextRow(&row); - // std::vector> expected = { - // {"Welcome", "to", "Beijing!"}, {"北京欢迎您!"}, {"我喜欢English!"}, {""}}; + std::vector> expected_tokens = { + {"Welcome", "to", "Beijing!"}, {"北京欢迎您!"}, {"我喜欢English!"}, {""}}; - // std::vector> expected_offsets_start = {{0, 8, 11}, {0}, {0}, {0}}; - // std::vector> expected_offsets_limit = {{7, 10, 19}, {18}, {17}, {0}}; + std::vector> expected_offsets_start = {{0, 8, 11}, {0}, {0}, {0}}; + std::vector> expected_offsets_limit = {{7, 10, 19}, {18}, {17}, {0}}; uint64_t i = 0; while (row.size() != 0) { - // auto ind = row["offsets_start"]; - // auto ind1 = row["offsets_limit"]; - // auto token = row["token"]; - // mindspore::MSTensor expected_tensor; - // mindspore::MSTensor expected_tensor_offsets_start; - // mindspore::MSTensor expected_tensor_offsets_limit; - // int x = expected[i].size(); - // Tensor::CreateFromVector(expected[i], TensorShape({x}), &expected_tensor); - // Tensor::CreateFromVector(expected_offsets_start[i], TensorShape({x}), &expected_tensor_offsets_start); - // Tensor::CreateFromVector(expected_offsets_limit[i], TensorShape({x}), &expected_tensor_offsets_limit); - // EXPECT_EQ(*ind, *expected_tensor_offsets_start); - // EXPECT_EQ(*ind1, *expected_tensor_offsets_limit); - // EXPECT_EQ(*token, *expected_tensor); + auto token = row["token"]; + auto start = row["offsets_start"]; + auto limit = row["offsets_limit"]; + + std::shared_ptr de_expected_tokens; + int x = expected_tokens[i].size(); + ASSERT_OK(Tensor::CreateFromVector(expected_tokens[i], TensorShape({x}), &de_expected_tokens)); + mindspore::MSTensor ms_expected_tokens = + mindspore::MSTensor(std::make_shared(de_expected_tokens)); + EXPECT_MSTENSOR_EQ(token, ms_expected_tokens); + + std::shared_ptr de_expected_offsets_start; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_start[i], TensorShape({x}), &de_expected_offsets_start)); + mindspore::MSTensor ms_expected_offsets_start = + mindspore::MSTensor(std::make_shared(de_expected_offsets_start)); + EXPECT_MSTENSOR_EQ(start, ms_expected_offsets_start); + + std::shared_ptr de_expected_offsets_limit; + ASSERT_OK(Tensor::CreateFromVector(expected_offsets_limit[i], TensorShape({x}), &de_expected_offsets_limit)); + mindspore::MSTensor ms_expected_offsets_limit = + mindspore::MSTensor(std::make_shared(de_expected_offsets_limit)); + EXPECT_MSTENSOR_EQ(limit, ms_expected_offsets_limit); iter->GetNextRow(&row); i++; diff --git a/tests/ut/cpp/dataset/c_api_text_vocab_test.cc b/tests/ut/cpp/dataset/c_api_text_vocab_test.cc index caac9a773b0..5c970227640 100644 --- a/tests/ut/cpp/dataset/c_api_text_vocab_test.cc +++ b/tests/ut/cpp/dataset/c_api_text_vocab_test.cc @@ -35,6 +35,12 @@ class MindDataTestPipeline : public UT::DatasetOpTesting { protected: }; +// Macro to compare 2 MSTensors as not equal; compare datasize only +#define EXPECT_MSTENSOR_DATA_NE(_mstensor1, _mstensor2) \ +do { \ + EXPECT_NE(_mstensor1.DataSize(), _mstensor2.DataSize()); \ +} while (false) + TEST_F(MindDataTestPipeline, TestVocabLookupOp) { MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVocabLookupOp."; @@ -67,13 +73,17 @@ TEST_F(MindDataTestPipeline, TestVocabLookupOp) { iter->GetNextRow(&row); uint64_t i = 0; - // std::vector expected = {2, 1, 4, 5, 6, 7}; + std::vector expected = {2, 1, 4, 5, 6, 7}; 
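+  // Lookup maps each input token to a single id, so the "text" column of every row is a scalar
+  // tensor here. Each expected id is materialized with Tensor::CreateScalar, wrapped as an
+  // MSTensor, and compared with EXPECT_MSTENSOR_EQ; since that comparison also covers the raw
+  // data size, the element type of `expected` must match the output type of the Lookup op.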
while (row.size() != 0) { - // auto ind = row["text"]; - // MS_LOG(INFO) << ind->shape() << " " << *ind; - // mindspore::MSTensor expected_item; - // Tensor::CreateScalar(expected[i], &expected_item); - // EXPECT_EQ(*ind, *expected_item); + auto ind = row["text"]; + MS_LOG(INFO) << ind.Shape(); + TEST_MS_LOG_MSTENSOR(INFO, "ind: ", ind); + std::shared_ptr de_expected_item; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_item)); + mindspore::MSTensor ms_expected_item = + mindspore::MSTensor(std::make_shared(de_expected_item)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_item); + iter->GetNextRow(&row); i++; } @@ -111,13 +121,17 @@ TEST_F(MindDataTestPipeline, TestVocabLookupOpEmptyString) { iter->GetNextRow(&row); uint64_t i = 0; - // std::vector expected = {2, 1, 4, 5, 6, 7}; + std::vector expected = {2, 1, 4, 5, 6, 7}; while (row.size() != 0) { - // auto ind = row["text"]; - // MS_LOG(INFO) << ind->shape() << " " << *ind; - // mindspore::MSTensor expected_item; - // Tensor::CreateScalar(expected[i], &expected_item); - // EXPECT_EQ(*ind, *expected_item); + auto ind = row["text"]; + MS_LOG(INFO) << ind.Shape(); + TEST_MS_LOG_MSTENSOR(INFO, "ind: ", ind); + std::shared_ptr de_expected_item; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_item)); + mindspore::MSTensor ms_expected_item = + mindspore::MSTensor(std::make_shared(de_expected_item)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_item); + iter->GetNextRow(&row); i++; } @@ -207,13 +221,17 @@ TEST_F(MindDataTestPipeline, TestVocabFromDataset) { iter->GetNextRow(&row); uint64_t i = 0; - // std::vector expected = {4, 5, 3, 6, 7, 2}; + std::vector expected = {4, 5, 3, 6, 7, 2}; while (row.size() != 0) { - // auto ind = row["text"]; - // MS_LOG(INFO) << ind->shape() << " " << *ind; - // mindspore::MSTensor expected_item; - // Tensor::CreateScalar(expected[i], &expected_item); - // EXPECT_EQ(*ind, *expected_item); + auto ind = row["text"]; + MS_LOG(INFO) << ind.Shape(); + TEST_MS_LOG_MSTENSOR(INFO, "ind: ", ind); + std::shared_ptr de_expected_item; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_item)); + mindspore::MSTensor ms_expected_item = + mindspore::MSTensor(std::make_shared(de_expected_item)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_item); + iter->GetNextRow(&row); i++; } @@ -253,16 +271,25 @@ TEST_F(MindDataTestPipeline, TestVocabFromDatasetDefault) { iter->GetNextRow(&row); uint64_t i = 0; - // std::vector expected = {2, 3, 1, 4, 5, 0}; - // std::vector not_expected = {2, 3, 1, 4, 5, 0}; + std::vector expected = {2, 3, 1, 4, 5, 0}; + std::vector not_expected = {2, 3, 1, 4, 5, 0}; while (row.size() != 0) { - // auto ind = row["text"]; - // MS_LOG(INFO) << ind->shape() << " " << *ind; - // mindspore::MSTensor expected_item, not_expected_item; - // Tensor::CreateScalar(expected[i], &expected_item); - // Tensor::CreateScalar(not_expected[i], ¬_expected_item); - // EXPECT_EQ(*ind, *expected_item); - // EXPECT_NE(*ind, *not_expected_item); + auto ind = row["text"]; + MS_LOG(INFO) << ind.Shape(); + TEST_MS_LOG_MSTENSOR(INFO, "ind: ", ind); + + std::shared_ptr de_expected_item; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_item)); + mindspore::MSTensor ms_expected_item = + mindspore::MSTensor(std::make_shared(de_expected_item)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_item); + + std::shared_ptr de_not_expected_item; + ASSERT_OK(Tensor::CreateScalar(not_expected[i], &de_not_expected_item)); + mindspore::MSTensor ms_not_expected_item = + mindspore::MSTensor(std::make_shared(de_not_expected_item)); + 
EXPECT_MSTENSOR_DATA_NE(ind, ms_not_expected_item); + iter->GetNextRow(&row); i++; } @@ -361,16 +388,25 @@ TEST_F(MindDataTestPipeline, TestVocabFromDatasetInt64) { iter->GetNextRow(&row); uint64_t i = 0; - // std::vector expected = {2, 3, 1, 4, 5, 0}; - // std::vector not_expected = {2, 3, 1, 4, 5, 0}; + std::vector expected = {2, 3, 1, 4, 5, 0}; + std::vector not_expected = {2, 3, 1, 4, 5, 0}; while (row.size() != 0) { - // auto ind = row["text"]; - // MS_LOG(INFO) << ind->shape() << " " << *ind; - // mindspore::MSTensor expected_item, not_expected_item; - // Tensor::CreateScalar(expected[i], &expected_item); - // Tensor::CreateScalar(not_expected[i], ¬_expected_item); - // EXPECT_EQ(*ind, *expected_item); - // EXPECT_NE(*ind, *not_expected_item); + auto ind = row["text"]; + MS_LOG(INFO) << ind.Shape(); + TEST_MS_LOG_MSTENSOR(INFO, "ind: ", ind); + + std::shared_ptr de_expected_item; + ASSERT_OK(Tensor::CreateScalar(expected[i], &de_expected_item)); + mindspore::MSTensor ms_expected_item = + mindspore::MSTensor(std::make_shared(de_expected_item)); + EXPECT_MSTENSOR_EQ(ind, ms_expected_item); + + std::shared_ptr de_not_expected_item; + ASSERT_OK(Tensor::CreateScalar(not_expected[i], &de_not_expected_item)); + mindspore::MSTensor ms_not_expected_item = + mindspore::MSTensor(std::make_shared(de_not_expected_item)); + EXPECT_MSTENSOR_DATA_NE(ind, ms_not_expected_item); + iter->GetNextRow(&row); i++; } diff --git a/tests/ut/cpp/dataset/c_api_transforms_test.cc b/tests/ut/cpp/dataset/c_api_transforms_test.cc index 53e8ba1eb00..d9d3fa0ecc8 100644 --- a/tests/ut/cpp/dataset/c_api_transforms_test.cc +++ b/tests/ut/cpp/dataset/c_api_transforms_test.cc @@ -164,12 +164,10 @@ TEST_F(MindDataTestPipeline, TestDuplicateSuccess) { uint64_t i = 0; while (row.size() != 0) { i++; - // FIXME - // auto image = row["image"]; - // auto image_copy = row["image_copy"]; - // MS_LOG(INFO) << "Tensor image shape: " << image.Shape(); - // auto n = memcmp(&image, &image_copy, image.DataSize()); - // EXPECT_EQ(n, 0); + auto image = row["image"]; + auto image_copy = row["image_copy"]; + MS_LOG(INFO) << "Tensor image shape: " << image.Shape(); + EXPECT_MSTENSOR_EQ(image, image_copy); iter->GetNextRow(&row); } diff --git a/tests/ut/cpp/dataset/c_api_vision_r_to_z_test.cc b/tests/ut/cpp/dataset/c_api_vision_r_to_z_test.cc index 88715b71cd4..9bd69b616a6 100644 --- a/tests/ut/cpp/dataset/c_api_vision_r_to_z_test.cc +++ b/tests/ut/cpp/dataset/c_api_vision_r_to_z_test.cc @@ -66,7 +66,7 @@ TEST_F(MindDataTestPipeline, TestRescaleSucess1) { auto image1 = row1["image"]; - // EXPECT_EQ(*image, *image1); + EXPECT_MSTENSOR_EQ(image, image1); // Manually terminate the pipeline iter1->Stop(); diff --git a/tests/ut/cpp/dataset/common/common.cc b/tests/ut/cpp/dataset/common/common.cc index 975a23c77d8..03884b0c66a 100644 --- a/tests/ut/cpp/dataset/common/common.cc +++ b/tests/ut/cpp/dataset/common/common.cc @@ -1,5 +1,5 @@ /** - * Copyright 2019 Huawei Technologies Co., Ltd + * Copyright 2019-2021 Huawei Technologies Co., Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,8 +14,8 @@ * limitations under the License. 
 */
 #include "common.h"
-#include 
 #include 
+#include 
 #include 
 #include 
diff --git a/tests/ut/cpp/dataset/common/common.h b/tests/ut/cpp/dataset/common/common.h
index 3bf1491aa60..b80871fc2a8 100644
--- a/tests/ut/cpp/dataset/common/common.h
+++ b/tests/ut/cpp/dataset/common/common.h
@@ -71,6 +71,14 @@ do {
                         _mstensor2.DataSize()), 0); \
 } while (false)
 
+// Macro to invoke MS_LOG for MSTensor
+#define TEST_MS_LOG_MSTENSOR(_loglevel, _msg, _mstensor)            \
+  do {                                                              \
+    std::shared_ptr<Tensor> _de_tensor;                             \
+    ASSERT_OK(Tensor::CreateFromMSTensor(_mstensor, &_de_tensor));  \
+    MS_LOG(_loglevel) << _msg << *_de_tensor;                       \
+  } while (false)
+
 namespace UT {
 class Common : public testing::Test {
  public:
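All of the reinstated checks above go through EXPECT_MSTENSOR_EQ from common.h. For orientation, a minimal sketch of the comparison that macro is assumed to perform, using only the MSTensor accessors (Shape, DataSize, Data) already exercised by these tests; the _SKETCH name is illustrative and not part of the real header:

// Sketch only: assumed to approximate the shape/size/byte comparison behind EXPECT_MSTENSOR_EQ.
// Requires <cstring> for std::memcmp and gtest's EXPECT_EQ.
#define EXPECT_MSTENSOR_EQ_SKETCH(_mstensor1, _mstensor2)                    \
  do {                                                                       \
    EXPECT_EQ(_mstensor1.Shape(), _mstensor2.Shape());                       \
    EXPECT_EQ(_mstensor1.DataSize(), _mstensor2.DataSize());                 \
    EXPECT_EQ(std::memcmp(_mstensor1.Data().get(), _mstensor2.Data().get(),  \
                          _mstensor2.DataSize()),                            \
              0);                                                            \
  } while (false)

EXPECT_MSTENSOR_DATA_NE, added in c_api_text_vocab_test.cc above, is the negative counterpart and intentionally compares only DataSize().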