From 008b91b2a165096e2babfb96d4b448b9766f67e4 Mon Sep 17 00:00:00 2001 From: anzhengqi Date: Wed, 15 Jul 2020 21:10:49 +0800 Subject: [PATCH] inject epoch ctrl op in the execution tree and send eos at the end of epoch --- .../ccsrc/minddata/dataset/api/de_pipeline.cc | 35 +- .../ccsrc/minddata/dataset/api/de_pipeline.h | 11 +- .../minddata/dataset/api/python_bindings.cc | 6 +- .../dataset/engine/dataset_iterator.cc | 47 +- .../dataset/engine/dataset_iterator.h | 3 + .../dataset/engine/datasetops/CMakeLists.txt | 1 + .../engine/datasetops/build_vocab_op.cc | 26 + .../engine/datasetops/build_vocab_op.h | 21 + .../engine/datasetops/cache_merge_op.cc | 16 +- .../engine/datasetops/cache_merge_op.h | 5 + .../dataset/engine/datasetops/dataset_op.cc | 16 + .../dataset/engine/datasetops/dataset_op.h | 9 + .../engine/datasetops/device_queue_op.cc | 85 +- .../engine/datasetops/device_queue_op.h | 20 +- .../engine/datasetops/epoch_ctrl_op.cc | 130 + .../dataset/engine/datasetops/epoch_ctrl_op.h | 82 + .../dataset/engine/datasetops/repeat_op.cc | 4 +- .../dataset/engine/datasetops/repeat_op.h | 8 +- .../dataset/engine/datasetops/zip_op.cc | 3 +- .../minddata/dataset/engine/execution_tree.cc | 15 +- .../minddata/dataset/engine/execution_tree.h | 8 +- .../dataset/engine/opt/CMakeLists.txt | 1 + .../ccsrc/minddata/dataset/engine/opt/pass.cc | 17 + .../ccsrc/minddata/dataset/engine/opt/pass.h | 10 + .../dataset/engine/opt/post/repeat_pass.cc | 88 +- .../dataset/engine/opt/post/repeat_pass.h | 24 +- .../dataset/engine/opt/pre/injection_pass.cc | 82 + .../dataset/engine/opt/pre/injection_pass.h | 75 + .../minddata/dataset/engine/tdt/tdt_plugin.cc | 23 +- .../minddata/dataset/engine/tdt/tdt_plugin.h | 3 +- mindspore/ccsrc/pipeline/jit/pipeline.cc | 3 + mindspore/dataset/engine/datasets.py | 61 +- mindspore/dataset/engine/iterators.py | 36 +- mindspore/train/_utils.py | 8 +- mindspore/train/dataset_helper.py | 159 +- mindspore/train/model.py | 43 +- model_zoo/alexnet/train.py | 2 +- model_zoo/bert/run_classifier.py | 9 +- model_zoo/bert/run_ner.py | 9 +- model_zoo/bert/run_pretrain.py | 9 +- model_zoo/bert/run_squad.py | 9 +- model_zoo/bert/src/dataset.py | 1 - model_zoo/deepfm/train.py | 4 +- model_zoo/deeplabv3/train.py | 2 +- model_zoo/faster_rcnn/train.py | 2 +- model_zoo/googlenet/train.py | 2 +- model_zoo/lenet/train.py | 3 +- model_zoo/lenet_quant/train.py | 2 +- model_zoo/lstm/train.py | 2 +- model_zoo/mass/train.py | 12 +- model_zoo/mobilenetv2/train.py | 4 +- model_zoo/mobilenetv2_quant/train.py | 2 +- model_zoo/mobilenetv3/train.py | 4 +- .../official/nlp/transformer/src/dataset.py | 6 +- model_zoo/official/nlp/transformer/train.py | 10 +- model_zoo/resnet/train.py | 4 +- model_zoo/resnet_thor/train.py | 2 +- model_zoo/ssd/train.py | 2 +- model_zoo/vgg16/train.py | 2 +- model_zoo/wide_and_deep/train.py | 2 +- model_zoo/wide_and_deep/train_and_eval.py | 4 +- .../train_and_eval_auto_parallel.py | 8 +- .../train_and_eval_distribute.py | 4 +- model_zoo/yolov3_resnet18/train.py | 2 +- tests/dataset_mock.py | 9 +- .../deeplabv3/train_one_epoch_with_loss.py | 2 +- .../transformer/test_transformer.py | 10 +- .../train_and_test_multinpu_ci.py | 4 +- ...rain_and_test_multinpu_ci_data_parallel.py | 4 +- .../st/model_zoo_tests/yolov3/test_yolov3.py | 6 +- .../models/bert/test_bert_tdt_lossscale.py | 14 +- .../models/deeplabv3/test_deeplabv3.py | 2 +- .../models/resnet50/test_resnet50_imagenet.py | 8 +- tests/st/tbe_networks/resnet_cifar.py | 2 +- tests/st/tbe_networks/test_resnet_cifar_1p.py | 2 +- 
tests/st/tbe_networks/test_resnet_cifar_8p.py | 2 +- tests/ut/cpp/dataset/CMakeLists.txt | 3 +- tests/ut/cpp/dataset/cache_op_test.cc | 34 +- tests/ut/cpp/dataset/epoch_ctrl_op_test.cc | 639 ++++ tests/ut/cpp/dataset/repeat_op_test.cc | 3 +- tests/ut/python/dataset/test_cache_map.py | 6 + .../python/dataset/test_datasets_tfrecord.py | 4 +- tests/ut/python/dataset/test_deviceop_cpu.py | 14 +- tests/ut/python/dataset/test_epoch_ctrl.py | 608 ++++ tests/ut/python/dataset/test_five_crop.py | 2 +- tests/ut/python/dataset/test_get_size.py | 16 +- tests/ut/python/dataset/test_iterator.py | 2 +- tests/ut/python/dataset/test_repeat.py | 45 + tests/ut/python/dataset/test_zip.py | 22 +- tests/ut/python/log | 2770 +++++++++++++++++ .../parallel/test_auto_parallel_resnet.py | 3 + tests/ut/python/parallel/test_bias_add.py | 3 + .../parallel/test_gather_v2_primitive.py | 3 + tests/ut/python/train/test_dataset_helper.py | 107 + 94 files changed, 5260 insertions(+), 397 deletions(-) create mode 100644 mindspore/ccsrc/minddata/dataset/engine/datasetops/epoch_ctrl_op.cc create mode 100644 mindspore/ccsrc/minddata/dataset/engine/datasetops/epoch_ctrl_op.h create mode 100644 mindspore/ccsrc/minddata/dataset/engine/opt/pre/injection_pass.cc create mode 100644 mindspore/ccsrc/minddata/dataset/engine/opt/pre/injection_pass.h create mode 100644 tests/ut/cpp/dataset/epoch_ctrl_op_test.cc create mode 100644 tests/ut/python/dataset/test_epoch_ctrl.py create mode 100644 tests/ut/python/log create mode 100644 tests/ut/python/train/test_dataset_helper.py diff --git a/mindspore/ccsrc/minddata/dataset/api/de_pipeline.cc b/mindspore/ccsrc/minddata/dataset/api/de_pipeline.cc index 0c4c6273a6..b31fdcf63b 100644 --- a/mindspore/ccsrc/minddata/dataset/api/de_pipeline.cc +++ b/mindspore/ccsrc/minddata/dataset/api/de_pipeline.cc @@ -25,6 +25,8 @@ #include "minddata/dataset/engine/dataset_iterator.h" #include "minddata/dataset/engine/datasetops/bucket_batch_by_length_op.h" #include "minddata/dataset/engine/datasetops/cache_op.h" +#include "minddata/dataset/engine/datasetops/device_queue_op.h" +#include "minddata/dataset/engine/datasetops/epoch_ctrl_op.h" #include "minddata/dataset/engine/datasetops/filter_op.h" #include "minddata/dataset/engine/datasetops/source/celeba_op.h" #include "minddata/dataset/engine/datasetops/source/cifar_op.h" @@ -84,7 +86,8 @@ static std::unordered_map g_parse_op_func_ = { {kRandomData, &DEPipeline::ParseRandomDataOp}, {kTextFile, &DEPipeline::ParseTextFileOp}, {kBuildVocab, &DEPipeline::ParseBuildVocabOp}, - {kClue, &DEPipeline::ParseClueOp}}; + {kClue, &DEPipeline::ParseClueOp}, + {kEpochCtrl, &DEPipeline::ParseEpochCtrlOp}}; DEPipeline::DEPipeline() : iterator_(nullptr) { try { @@ -166,8 +169,8 @@ Status DEPipeline::AddChildToParentNode(const DsOpPtr &child_op, const DsOpPtr & Status DEPipeline::AssignRootNode(const DsOpPtr &dataset_op) { return (tree_->AssignRoot(dataset_op)); } // Function to launch the tree execution. 
-Status DEPipeline::LaunchTreeExec() { - RETURN_IF_NOT_OK(tree_->Prepare()); +Status DEPipeline::LaunchTreeExec(const int32_t num_epochs) { + RETURN_IF_NOT_OK(tree_->Prepare(num_epochs)); RETURN_IF_NOT_OK(tree_->Launch()); iterator_ = std::make_unique(tree_); if (iterator_ == nullptr) RETURN_STATUS_UNEXPECTED("Cannot create an Iterator."); @@ -252,6 +255,16 @@ int DEPipeline::GetRepeatCount() const { return repeat_num_; } float ToFloat(const py::handle &handle) { return py::reinterpret_borrow(handle); } +Status DEPipeline::StopSend() { + // tree_.root() must be DeviceQueueOp + DeviceQueueOp *op = dynamic_cast(tree_->root().get()); + if (op == nullptr) { + return Status(StatusCode::kUnexpectedError, __LINE__, __FILE__, "StopSend only supported by DeviceQueueOp"); + } + op->StopSend(); + return Status::OK(); +} + int ToInt(const py::handle &handle) { return py::reinterpret_borrow(handle); } bool ToBool(const py::handle &handle) { return py::reinterpret_borrow(handle); } @@ -804,6 +817,18 @@ Status DEPipeline::ParseSkipOp(const py::dict &args, std::shared_ptr return Status::OK(); } +Status DEPipeline::ParseEpochCtrlOp(const py::dict &args, std::shared_ptr *top, + std::shared_ptr *bottom) { + if (args["count"].is_none()) { + std::string err_msg = "Error: count is invalid or not set."; + RETURN_STATUS_UNEXPECTED(err_msg); + } + std::shared_ptr op; + RETURN_IF_NOT_OK(EpochCtrlOp::Builder(ToInt(args["count"])).Build(&op)); + *top = op; + return Status::OK(); +} + Status DEPipeline::ParseGeneratorOp(const py::dict &args, std::shared_ptr *top, std::shared_ptr *bottom) { std::shared_ptr builder = std::make_shared(); @@ -973,8 +998,8 @@ Status DEPipeline::ParseDeviceQueueOp(const py::dict &args, std::shared_ptrSetDeviceType(ToString(value)); } else if (key == "device_id") { (void)builder->SetDeviceId(ToInt(value)); - } else if (key == "num_batch") { - (void)builder->SetNumBatch(ToInt(value)); + } else if (key == "send_epoch_end") { + (void)builder->SetSendEpochEnd(ToBool(value)); } } } diff --git a/mindspore/ccsrc/minddata/dataset/api/de_pipeline.h b/mindspore/ccsrc/minddata/dataset/api/de_pipeline.h index b3adb6ae9f..8c6cc48dbb 100644 --- a/mindspore/ccsrc/minddata/dataset/api/de_pipeline.h +++ b/mindspore/ccsrc/minddata/dataset/api/de_pipeline.h @@ -70,7 +70,8 @@ enum OpName { kRandomData, kTextFile, kBuildVocab, - kClue + kClue, + kEpochCtrl }; // The C++ binder class that we expose to the python script. @@ -90,7 +91,7 @@ class DEPipeline { Status AssignRootNode(const DsOpPtr &dataset_op); // Function to launch the tree execution. - Status LaunchTreeExec(); + Status LaunchTreeExec(int32_t num_epochs); // Get a row of data as dictionary of column name to the value. 
Status GetNextAsMap(py::dict *output); @@ -143,6 +144,10 @@ class DEPipeline { Status ParseBucketBatchByLengthOp(const py::dict &args, std::shared_ptr *top, std::shared_ptr *bottom); + Status ParseEpochCtrlOp(const py::dict &args, std::shared_ptr *top, std::shared_ptr *bottom); + + Status ParseBatchOp(const py::dict &args, std::shared_ptr *ptr); + Status ParseBarrierOp(const py::dict &args, std::shared_ptr *top, std::shared_ptr *bottom); Status ParseGeneratorOp(const py::dict &args, std::shared_ptr *top, std::shared_ptr *bottom); @@ -189,6 +194,8 @@ class DEPipeline { Status ParseBuildVocabOp(const py::dict &args, std::shared_ptr *top, std::shared_ptr *bottom); + Status StopSend(); + Status ParseClueOp(const py::dict &args, std::shared_ptr *top, std::shared_ptr *bottom); private: diff --git a/mindspore/ccsrc/minddata/dataset/api/python_bindings.cc b/mindspore/ccsrc/minddata/dataset/api/python_bindings.cc index 94c4ec40d7..d40b7dc975 100644 --- a/mindspore/ccsrc/minddata/dataset/api/python_bindings.cc +++ b/mindspore/ccsrc/minddata/dataset/api/python_bindings.cc @@ -159,7 +159,7 @@ void bindDEPipeline(py::module *m) { [](DEPipeline &de, const DsOpPtr &dataset_op) { THROW_IF_ERROR(de.AssignRootNode(dataset_op)); }) .def("SetBatchParameters", [](DEPipeline &de, const py::dict &args) { THROW_IF_ERROR(de.SetBatchParameters(args)); }) - .def("LaunchTreeExec", [](DEPipeline &de) { THROW_IF_ERROR(de.LaunchTreeExec()); }) + .def("LaunchTreeExec", [](DEPipeline &de, int32_t num_epochs) { THROW_IF_ERROR(de.LaunchTreeExec(num_epochs)); }) .def("GetNextAsMap", [](DEPipeline &de) { py::dict out; @@ -188,6 +188,7 @@ void bindDEPipeline(py::module *m) { .def("GetBatchSize", &DEPipeline::GetBatchSize) .def("GetNumClasses", &DEPipeline::GetNumClasses) .def("GetRepeatCount", &DEPipeline::GetRepeatCount) + .def("StopSend", [](DEPipeline &de) { THROW_IF_ERROR(de.StopSend()); }) .def("SaveDataset", [](DEPipeline &de, const std::vector &file_names, const std::string &file_type) { THROW_IF_ERROR(de.SaveDataset(file_names, file_type)); return true; @@ -999,7 +1000,8 @@ PYBIND11_MODULE(_c_dataengine, m) { .value("BUILDVOCAB", OpName::kBuildVocab) .value("CELEBA", OpName::kCelebA) .value("TEXTFILE", OpName::kTextFile) - .value("CLUE", OpName::kClue); + .value("CLUE", OpName::kClue) + .value("EPOCHCTRL", OpName::kEpochCtrl); (void)py::enum_(m, "JiebaMode", py::arithmetic()) .value("DE_JIEBA_MIX", JiebaMode::kMix) diff --git a/mindspore/ccsrc/minddata/dataset/engine/dataset_iterator.cc b/mindspore/ccsrc/minddata/dataset/engine/dataset_iterator.cc index f75ca5d097..99c5c96b40 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/dataset_iterator.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/dataset_iterator.cc @@ -40,7 +40,9 @@ Status IteratorBase::GetNextAsMap(TensorMap *out_map) { out_map->clear(); TensorRow curr_row; + MS_LOG(INFO) << "get next as map start."; RETURN_IF_NOT_OK(FetchNextTensorRow(&curr_row)); + MS_LOG(INFO) << "fetchNextTensor success."; // Return empty map if there's no data if (curr_row.empty()) { @@ -105,7 +107,8 @@ Status DatasetIterator::FetchNextTensorRow(TensorRow *out_row) { // Once eof is handled, always return empty row. Class must be destroyed and recreated if you // want to iterate again. if (eof_handled_) { - return Status::OK(); + std::string err = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs."; + RETURN_STATUS_UNEXPECTED(err); } // Check if we need to get a new DataBuffer to iterate. 
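The dataset_iterator.cc hunks around this point change the iterator contract: an EOE buffer now only marks the end of one epoch and iteration continues, while an EOF buffer (or any fetch after it) is reported as an error because the caller tried to read past the requested number of epochs. The following is a minimal, self-contained sketch of that control flow under those assumptions; `ToyIterator`, `Buffer`, and `BufferKind` are illustrative names, not part of the MindSpore code base.

```cpp
#include <deque>
#include <iostream>
#include <optional>
#include <stdexcept>
#include <utility>

// Illustrative buffer kinds: EOE marks the end of one epoch, EOF the end of the pipeline.
enum class BufferKind { kData, kEoe, kEof };

struct Buffer {
  BufferKind kind;
  int row = 0;  // stand-in for a TensorRow
};

class ToyIterator {
 public:
  explicit ToyIterator(std::deque<Buffer> stream) : stream_(std::move(stream)) {}

  // Returns a row, an empty value at an epoch boundary, or throws once EOF has been seen.
  std::optional<int> FetchNext() {
    if (eof_handled_) {
      // Mirrors the new behaviour: fetching past the requested number of epochs is an error.
      throw std::runtime_error("EOF already received; no more epochs were requested.");
    }
    Buffer b = stream_.front();
    stream_.pop_front();
    if (b.kind == BufferKind::kEoe) {
      std::cout << "end of one epoch\n";
      return std::nullopt;  // empty row signals the epoch boundary, iteration may continue
    }
    if (b.kind == BufferKind::kEof) {
      eof_handled_ = true;
      throw std::runtime_error("EOF received; pipeline is shutting down.");
    }
    return b.row;
  }

 private:
  std::deque<Buffer> stream_;
  bool eof_handled_ = false;
};

int main() {
  ToyIterator it({{BufferKind::kData, 1}, {BufferKind::kEoe}, {BufferKind::kData, 2},
                  {BufferKind::kEoe}, {BufferKind::kEof}});
  try {
    while (true) {
      auto row = it.FetchNext();
      if (row) std::cout << "row " << *row << "\n";
    }
  } catch (const std::exception &e) {
    std::cout << e.what() << "\n";
  }
}
```

The design point is that EOE is no longer a terminal event for the iterator, so a caller that wants several epochs keeps fetching across the boundary instead of recreating the iterator.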
@@ -119,36 +122,22 @@ Status DatasetIterator::FetchNextTensorRow(TensorRow *out_row) { // Since GetNextBuffer was used rather than GetNextInput(), it means we need to manually // handle eoe and eof messages here. // - // An eoe buffer means we have iterated fully to the end of the tree. - // An eoe buffer will be immediately followed by an eof buffer, which signals the shutdown of - // all operators. + // An eoe buffer means we have iterated an epoch. + // The next buffer in the pipeline might be an EOF or a databuffer for next epoch if (curr_buffer_->eoe()) { - MS_LOG(DEBUG) << "End of data iteration. Fetch eof and then return empty row."; - - // Before returning the last empty vector, fetch the eof buffer which should be the last - // buffer, and then free it. - RETURN_IF_NOT_OK(root_->GetNextBuffer(&curr_buffer_)); - - if (!curr_buffer_->eof()) { - RETURN_STATUS_UNEXPECTED("Non-eof after getting eoe in iterator!"); - } - eof_handled_ = true; - curr_buffer_.reset(); // explicitly free the eof buffer - // Set tree to Finished state - root_->Tree()->SetFinished(); - + MS_LOG(INFO) << "End of data iteration."; + curr_buffer_.reset(); // explicitly free the eoe buffer return Status::OK(); } + // An eof buffer means it is the end of execution and all operators are shutting down. + // Because there is no more data to return to the caller, this will change `eof_handled_` state and + // returns status unexpected error. if (curr_buffer_->eof()) { - // An eof by itself, without being preceded by an eoe, is possible if a repeat operator - // exists below us in the stack. Repeat operator eats eoe's but eventually allows the - // flow of an eof up the pipeline by itself. eof_handled_ = true; curr_buffer_.reset(); // explicitly free the eof buffer - // Set tree to Finished state - root_->Tree()->SetFinished(); - return Status::OK(); + std::string err = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs."; + RETURN_STATUS_UNEXPECTED(err); } } @@ -208,20 +197,24 @@ Status ChildIterator::FetchNextTensorRow(TensorRow *out_row) { // Once eof is handled, always return empty row. Class must be destroyed and recreated if you // want to iterate again. if (eof_handled_) { - return Status::OK(); + std::string err = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs."; + RETURN_STATUS_UNEXPECTED(err); } // Check if we need to get a new DataBuffer to iterate. if (curr_buffer_ == nullptr || curr_buffer_->NumRows() == 0) { + // GetNextInput() depends on current_op's EoeReceived. So, EOE buffer might be already be handled and + // this child iterator might not see EOE buffer. RETURN_IF_NOT_OK(current_op_->GetNextInput(&curr_buffer_, worker_id_, child_idx_)); - // Unlike the DatasetIterator, this child iterator does not quit after eoe. - // Instead, if an eoe is picked up here, we simply return an empty vector and it's up to the + // If an eoe is picked up here, we simply return an empty vector and it's up to the // caller to decide what it wants to do next. 
if (curr_buffer_->eoe()) { MS_LOG(DEBUG) << "Child iterator picked up EOE."; end_epoch_ = true; return Status::OK(); + } else { + end_epoch_ = false; } if (curr_buffer_->eof()) { diff --git a/mindspore/ccsrc/minddata/dataset/engine/dataset_iterator.h b/mindspore/ccsrc/minddata/dataset/engine/dataset_iterator.h index 253d1604e2..07f93478a4 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/dataset_iterator.h +++ b/mindspore/ccsrc/minddata/dataset/engine/dataset_iterator.h @@ -144,6 +144,9 @@ class ChildIterator : public IteratorBase { // @return The string to column id mapping. std::unordered_map GetColumnNameMap() const override; + // Return T/F if end of epoch + bool end_of_epoch() { return end_epoch_; } + private: DatasetOp *current_op_; // The parent operator. We consume from it's children. int32_t child_idx_; // The specific child this iterator will fetch from. diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/CMakeLists.txt b/mindspore/ccsrc/minddata/dataset/engine/datasetops/CMakeLists.txt index a2cd6dc07a..94c313f174 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/CMakeLists.txt +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/CMakeLists.txt @@ -18,6 +18,7 @@ set(DATASET_ENGINE_DATASETOPS_SRC_FILES shuffle_op.cc zip_op.cc concat_op.cc + epoch_ctrl_op.cc cache_base_op.cc cache_lookup_op.cc cache_op.cc diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/build_vocab_op.cc b/mindspore/ccsrc/minddata/dataset/engine/datasetops/build_vocab_op.cc index 8ed51ebbb6..2867533842 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/build_vocab_op.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/build_vocab_op.cc @@ -17,11 +17,13 @@ #include "minddata/dataset/engine/datasetops/build_vocab_op.h" #include +#include #include #include #include #include #include "minddata/dataset/core/config_manager.h" +#include "minddata/dataset/engine/opt/pass.h" namespace mindspore { namespace dataset { @@ -202,5 +204,29 @@ BuildVocabOp::Builder::Builder() builder_num_workers_ = cfg->num_parallel_workers(); builder_connector_size_ = cfg->op_connector_size(); } + +// A print method typically used for debugging +void BuildVocabOp::Print(std::ostream &out, bool show_all) const { + // Always show the id and name as first line regardless if this summary or detailed print + out << "(" << std::setw(2) << operator_id_ << ") :"; + if (!show_all) { + // Call the super class for displaying any common 1-liner info + ParallelOp::Print(out, show_all); + // Then show any custom derived-internal 1-liner info for this op + out << "\n"; + } else { + // Call the super class for displaying any common detailed info + ParallelOp::Print(out, show_all); + // Then show any custom derived-internal stuff + out << "\nCode is needed here to show more info about the op." 
+ << "\n\n"; + } +} + +// Pre-Visitor accept method for NodePass +Status BuildVocabOp::PreAccept(NodePass *p, bool *modified) { + // Downcast shared pointer then call the pre-visitation + return p->PreRunOnNode(shared_from_base(), modified); +} } // namespace dataset } // namespace mindspore diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/build_vocab_op.h b/mindspore/ccsrc/minddata/dataset/engine/datasetops/build_vocab_op.h index 42ea0deb5c..040de40cf6 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/build_vocab_op.h +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/build_vocab_op.h @@ -131,6 +131,21 @@ class BuildVocabOp : public ParallelOp { ~BuildVocabOp() = default; + /// \brief A print method typically used for debugging + /// \param[out] out The output stream to write output to + /// \param[in] show_all A bool to control if you want to show all info or just a summary + void Print(std::ostream &out, bool show_all) const override; + + /// \briefStream output operator overload + /// \notes This allows you to write the debug print info using stream operators + /// \param[out] out Reference to the output stream being overloaded + /// \param[in] vop - reference to the BuildVocabOp to display + /// \return - the output stream must be returned + friend std::ostream &operator<<(std::ostream &out, const BuildVocabOp &vop) { + vop.Print(out, false); + return out; + } + Status WorkerEntry(int32_t worker_id) override; // collect the work product from each worker @@ -152,6 +167,12 @@ class BuildVocabOp : public ParallelOp { Status Reset() override { RETURN_STATUS_UNEXPECTED("Reset shouldn't be called in BuildVocabOp"); } + /// \brief Base-class override for NodePass pre-visit acceptor + /// \param[in] p The node to visit + /// \param[out] modified Indicator if the node was modified + /// \return Status of the node visit + Status PreAccept(NodePass *p, bool *modified) override; + private: const int32_t interval_; bool special_first_; diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/cache_merge_op.cc b/mindspore/ccsrc/minddata/dataset/engine/datasetops/cache_merge_op.cc index 75579dc3a6..39029918e8 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/cache_merge_op.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/cache_merge_op.cc @@ -96,7 +96,7 @@ Status CacheMergeOp::WorkerEntry(int32_t worker_id) { RETURN_IF_NOT_OK(cache_hit_stream->GetNextBuffer(&db_ptr, worker_id)); } } - RETURN_IF_NOT_OK(out_connector_->Add(worker_id, std::move(db_ptr))); + RETURN_IF_NOT_OK(EofReceived(worker_id)); return Status::OK(); } Status CacheMergeOp::CacheMissWorkerEntry(int32_t workerId) { @@ -298,5 +298,19 @@ Status CacheMergeOp::EoeReceived(int32_t worker_id) { } return Status::OK(); } + +// Base-class override for handling cases when an eof is received. +Status CacheMergeOp::EofReceived(int32_t worker_id) { + // If we are not in a repeated path, then the merge op gets a eof by itself, without first + // getting an eoe. However, the logic demands that all epochs close with an eoe first before eof. + // Thus, generate an eoe first, before flowing up the eof in the non-repeated case. Base class + // provides that for us. 
+ if (!BitTest(op_ctrl_flags_, kDeOpRepeated)) { + MS_LOG(DEBUG) << "Cache merge sending eoe"; + RETURN_IF_NOT_OK(DatasetOp::EoeReceived(worker_id)); + } + MS_LOG(DEBUG) << "Cache merge sending eof"; + return DatasetOp::EofReceived(worker_id); +} } // namespace dataset } // namespace mindspore diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/cache_merge_op.h b/mindspore/ccsrc/minddata/dataset/engine/datasetops/cache_merge_op.h index df37465fc4..97c64f0fd0 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/cache_merge_op.h +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/cache_merge_op.h @@ -176,6 +176,11 @@ class CacheMergeOp : public ParallelOp { /// \return Status object Status EoeReceived(int32_t worker_id) override; + /// \brief Base-class override for handling cases when an eof is received. + /// \param worker_id - The worker id + /// \return Status - The error code return + Status EofReceived(int32_t worker_id) override; + protected: Status ComputeColMap() override; diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/dataset_op.cc b/mindspore/ccsrc/minddata/dataset/engine/datasetops/dataset_op.cc index 9254141308..d22117fc30 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/dataset_op.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/dataset_op.cc @@ -26,6 +26,7 @@ #include "minddata/dataset/engine/execution_tree.h" #include "minddata/dataset/engine/datasetops/device_queue_op.h" #include "minddata/dataset/engine/datasetops/source/sampler/sampler.h" +#include "minddata/dataset/engine/datasetops/epoch_ctrl_op.h" #include "minddata/dataset/engine/data_buffer.h" #include "minddata/dataset/engine/db_connector.h" #include "minddata/dataset/engine/opt/pass.h" @@ -102,6 +103,15 @@ Status DatasetOp::InsertAsParent(std::shared_ptr to_add) { } return Status::OK(); } +// Removes child operator in this operator. +Status DatasetOp::RemoveChildren() { + for (const auto &child : child_) { + child->RemoveParent(this); + } + child_.clear(); + + return Status::OK(); +} // Adds a parent operator to this operator void DatasetOp::AddParent(DatasetOp *parent) { parent_.push_back(parent); } @@ -185,6 +195,12 @@ void DatasetOp::Parent(DatasetOp **parent, int32_t parent_index) const { } } +// Getter function to get all of our children. +std::vector> DatasetOp::children() const { return child_; } + +// Getter function to get all of our parents. +std::vector DatasetOp::parents() const { return parent_; } + // Creates the connector within this operator void DatasetOp::CreateConnector(int32_t num_producers, int32_t num_consumers) { MS_LOG(DEBUG) << "Creating connector in tree operator: " << operator_id_ << ". Producer: " << num_producers diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/dataset_op.h b/mindspore/ccsrc/minddata/dataset/engine/datasetops/dataset_op.h index b4630c1652..3271ed73c1 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/dataset_op.h +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/dataset_op.h @@ -76,6 +76,9 @@ class DatasetOp : public std::enable_shared_from_this { /// \return Status eerror code returned Status Remove(); + // Removes child operator in this operator. + Status RemoveChildren(); + /// \brief Getter function to get a shared pointer to our child /// \param[in] child_index An operator can have n children. Indicates which child to return. /// \return The shared pointer to the child. 
If there are no children, it returns null regardless of the given index @@ -86,6 +89,12 @@ class DatasetOp : public std::enable_shared_from_this { /// \param[in] parent_index An operator can have n parents. Indicates which parent to return. void Parent(DatasetOp **parent, int32_t parent_index) const; + // Getter function to get all of our children. + std::vector> children() const; + + // Getter function to get all of our parents. + std::vector parents() const; + // Inserts a operator as the parent current op. // Inserted op will become the sole parent of the current op. // The existing parent of the current op will be transferred to the inserted op. diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/device_queue_op.cc b/mindspore/ccsrc/minddata/dataset/engine/datasetops/device_queue_op.cc index 4fe779246b..a7fec7728e 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/device_queue_op.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/device_queue_op.cc @@ -25,19 +25,21 @@ #include "minddata/dataset/engine/opt/pass.h" #include "minddata/dataset/engine/perf/profiling.h" #include "minddata/dataset/engine/perf/device_queue_tracing.h" +#include "minddata/dataset/engine/datasetops/epoch_ctrl_op.h" #include "minddata/dataset/util/status.h" #include "minddata/dataset/util/task_manager.h" namespace mindspore { namespace dataset { DeviceQueueOp::DeviceQueueOp(std::string channel_name, DeviceType device_type, int32_t device_id, int32_t prefetch_size, - int32_t op_connector_size, int64_t num_batch) + int32_t op_connector_size, bool send_epoch_end) : PipelineOp(op_connector_size), channel_name_(channel_name), device_type_(device_type), device_id_(device_id), prefetch_size_(prefetch_size), - num_batch_(num_batch) {} + send_epoch_end_(send_epoch_end), + stop_send_(false) {} DeviceQueueOp::~DeviceQueueOp() {} @@ -53,8 +55,7 @@ DeviceQueueOp::Builder::Builder(int32_t prefetch_size) : builder_prefetch_size_(prefetch_size), builder_device_id_(0), builder_device_type_(DeviceType::CPU), - builder_channel_name_(""), - builder_num_batch_(0) { + builder_channel_name_("") { std::shared_ptr cfg = GlobalContext::config_manager(); builder_op_connector_size_ = cfg->op_connector_size(); } @@ -64,6 +65,18 @@ Status DeviceQueueOp::EoeReceived(int32_t worker_id) { return Status::OK(); } +Status DeviceQueueOp::CheckExceptions(const std::unique_ptr &buffer) const { + // this method checks if the buffer meets the conditions to be sent to TDT + if (buffer->NumRows() != 0) { + TensorRow row; + buffer->GetRow(0, &row); + for (const auto &item : row) { + CHECK_FAIL_RETURN_UNEXPECTED(item->type().IsNumeric(), "Cannot send tensor of string type to device."); + } + } + return Status::OK(); +} + Status DeviceQueueOp::operator()() { TaskManager::FindMe()->Post(); @@ -82,23 +95,10 @@ Status DeviceQueueOp::operator()() { return Status::OK(); } -Status DeviceQueueOp::CheckExceptions(const std::unique_ptr &buffer) const { - // this method checks if the buffer meets the conditions to be sent to TDT - if (buffer->NumRows() != 0) { - TensorRow row; - buffer->GetRow(0, &row); - for (const auto &item : row) { - CHECK_FAIL_RETURN_UNEXPECTED(item->type().IsNumeric(), "Cannot send tensor of string type to device."); - } - } - return Status::OK(); -} - #ifdef ENABLE_TDTQUE Status DeviceQueueOp::SendDataToAscend() { MS_LOG(INFO) << "Device queue, sending data to Ascend."; int64_t total_batch = 0; - bool is_break_loop = false; double batch_start_time, end_time; int32_t batch_cost, tdt_cost; int32_t connector_size = 
0; @@ -115,15 +115,20 @@ Status DeviceQueueOp::SendDataToAscend() { std::unique_ptr current_buffer; RETURN_IF_NOT_OK(GetNextInput(¤t_buffer)); - while (!current_buffer->eof() && !is_break_loop) { - while (!current_buffer->eoe() && !is_break_loop) { + while (!current_buffer->eof()) { + while (!current_buffer->eoe()) { RETURN_IF_NOT_OK(CheckExceptions(current_buffer)); TensorRow currRow; - for (int row_id = 0; row_id < current_buffer->NumRows() && !is_break_loop; row_id++) { + for (int row_id = 0; row_id < current_buffer->NumRows(); row_id++) { RETURN_IF_NOT_OK(current_buffer->GetRow(row_id, &currRow)); auto status = tdtInstancePtr->hostPush(currRow, true, channel_name_, isProfilingEnable, tdt_cost); if (status == TdtStatus::FAILED) { - return Status(StatusCode::kTDTPushFailure, "TDT Push Failed"); + if (stop_send_) { + MS_LOG(INFO) << "stop_send received"; + return Status::OK(); + } else { + return Status(StatusCode::kTDTPushFailure, "TDT Push Failed"); + } } if (isProfilingEnable) { @@ -140,9 +145,6 @@ Status DeviceQueueOp::SendDataToAscend() { profiling_node->Record(CONNECTOR_DEPTH, connector_capacity, total_batch + 1, connector_size); } total_batch++; - if (num_batch_ > 0 && total_batch == num_batch_) { - is_break_loop = true; - } } if (isProfilingEnable) { connector_size = ChildOpConnectorSize(); @@ -150,6 +152,19 @@ Status DeviceQueueOp::SendDataToAscend() { } RETURN_IF_NOT_OK(GetNextInput(¤t_buffer)); } + if (current_buffer->eoe() && send_epoch_end_) { + TensorRow currRow; + auto status = + tdtInstancePtr->hostPush(currRow, true, channel_name_, isProfilingEnable, tdt_cost, tdt::TDT_END_OF_SEQUENCE); + if (status == TdtStatus::FAILED) { + if (stop_send_) { + MS_LOG(INFO) << "stop_send received"; + return Status::OK(); + } else { + return Status(StatusCode::kTDTPushFailure, "TDT Push Failed"); + } + } + } if (isProfilingEnable) { connector_size = ChildOpConnectorSize(); connector_capacity = ChildOpConnectorCapacity(); @@ -158,7 +173,7 @@ Status DeviceQueueOp::SendDataToAscend() { } tree_->SetFinished(); - MS_LOG(INFO) << "Device queue total batch is " << total_batch << ", number of batches is " << num_batch_ << "."; + MS_LOG(INFO) << "Device queue total batch is " << total_batch; return Status::OK(); } @@ -196,9 +211,6 @@ Status DeviceQueueOp::SendDataToGPU() { } RETURN_IF_NOT_OK(RetryPushGPUData(data_size, curr_row, handle)); total_batch++; - if (num_batch_ > 0 && total_batch == num_batch_) { - is_break_loop = true; - } } if (!TaskManager::FindMe()->Interrupted()) RETURN_IF_NOT_OK(GetNextInput(¤t_buffer)); @@ -211,12 +223,10 @@ Status DeviceQueueOp::SendDataToGPU() { is_break_loop = true; } - MS_LOG(INFO) << "Device queue total batch is " << total_batch << ", number of batches is " << num_batch_ << "."; + MS_LOG(INFO) << "Device queue total batch is " << total_batch << "."; GpuBufferMgr::GetInstance().Close(handle); - GpuBufferMgr::GetInstance().CloseConfirm(); - return Status::OK(); } @@ -240,8 +250,11 @@ Status DeviceQueueOp::RetryPushGPUData(const std::vector &data_size, con if (ret == BlockQueueStatus_T::ERROR_INPUT) { return Status(StatusCode::kUnexpectedError, __LINE__, __FILE__, "invalid input Data, please check it."); } else { - MS_LOG(WARNING) << "Retry pushing data..."; - continue; + if (!stop_send_) { + MS_LOG(WARNING) << "Retry pushing data..."; + continue; + } + break; } } else { break; @@ -283,13 +296,11 @@ Status DeviceQueueOp::SendDataToCPU() { MS_LOG(DEBUG) << "Feature size is " << curr_row[0]->SizeInBytes() << "."; MS_LOG(DEBUG) << "Label size is " << 
curr_row[1]->SizeInBytes() << "."; total_batch++; - if (num_batch_ > 0 && total_batch == num_batch_) { - break; - } + if (stop_send_) break; } } - MS_LOG(INFO) << "Device queue total batch is " << total_batch << ", number of batches is " << num_batch_ << "."; + MS_LOG(INFO) << "Device queue total batch is " << total_batch << "."; return Status::OK(); } diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/device_queue_op.h b/mindspore/ccsrc/minddata/dataset/engine/datasetops/device_queue_op.h index 0fb4fb093d..347cd108cc 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/device_queue_op.h +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/device_queue_op.h @@ -21,6 +21,7 @@ #include #include "minddata/dataset/engine/datasetops/pipeline_op.h" +#include "minddata/dataset/engine/datasetops/repeat_op.h" #include "minddata/dataset/util/status.h" #ifdef ENABLE_TDTQUE @@ -84,8 +85,8 @@ class DeviceQueueOp : public PipelineOp { return *this; } - Builder &SetNumBatch(int64_t num_batch) { - builder_num_batch_ = num_batch; + Builder &SetSendEpochEnd(bool send_epoch_end) { + builder_send_epoch_end_ = send_epoch_end; return *this; } @@ -94,8 +95,9 @@ class DeviceQueueOp : public PipelineOp { // to call this Build() method. It will instantiate the DeviceQueueOp // and return it to caller as a shared pointer. Status Build(std::shared_ptr *ptr) { - *ptr = std::make_shared(builder_channel_name_, builder_device_type_, builder_device_id_, - builder_prefetch_size_, builder_op_connector_size_, builder_num_batch_); + *ptr = + std::make_shared(builder_channel_name_, builder_device_type_, builder_device_id_, + builder_prefetch_size_, builder_op_connector_size_, builder_send_epoch_end_); return Status::OK(); } @@ -104,14 +106,14 @@ class DeviceQueueOp : public PipelineOp { int32_t builder_device_id_; DeviceType builder_device_type_; std::string builder_channel_name_; - int64_t builder_num_batch_; int32_t builder_op_connector_size_; + bool builder_send_epoch_end_; }; // Name: constructor // Description DeviceQueueOp(std::string channel_name, DeviceType device_type, int32_t device_id, int32_t prefetch_size, - int32_t op_connector_size, int64_t num_batch); + int32_t op_connector_size, bool send_epoch_end); // Name: destructor // Description @@ -121,6 +123,8 @@ class DeviceQueueOp : public PipelineOp { const int32_t get_prefetch_size() { return prefetch_size_; } + void StopSend() { stop_send_ = true; } + // Name: Print() // Description: A function that prints info about the node void Print(std::ostream &out, // In: The output stream to print to @@ -149,6 +153,7 @@ class DeviceQueueOp : public PipelineOp { // Description: Check whether the dataBuffer meets the condition for performing DeviceQueueOp Status CheckExceptions(const std::unique_ptr &buffer) const; + private: #ifdef ENABLE_TDTQUE Status SendDataToAscend(); #endif @@ -164,7 +169,8 @@ class DeviceQueueOp : public PipelineOp { DeviceType device_type_; const int32_t device_id_; const int32_t prefetch_size_; - const int64_t num_batch_; + const bool send_epoch_end_; + bool stop_send_; #ifdef ENABLE_TDTQUE std::shared_ptr tdtInstancePtr; diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/epoch_ctrl_op.cc b/mindspore/ccsrc/minddata/dataset/engine/datasetops/epoch_ctrl_op.cc new file mode 100644 index 0000000000..de0ab7452f --- /dev/null +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/epoch_ctrl_op.cc @@ -0,0 +1,130 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include +#include +#include + +#include "minddata/dataset/engine/execution_tree.h" +#include "minddata/dataset/engine/datasetops/epoch_ctrl_op.h" +#include "minddata/dataset/engine/data_buffer.h" +#include "minddata/dataset/engine/db_connector.h" +#include "minddata/dataset/engine/opt/pass.h" +#include "utils/log_adapter.h" + +namespace mindspore { +namespace dataset { + +// The builder "build" method creates the final object. +Status EpochCtrlOp::Builder::Build(std::shared_ptr *ptr) { + RETURN_IF_NOT_OK(SanityCheck()); + *ptr = std::make_shared(build_max_repeats_); + return Status::OK(); +} + +// Constructor +EpochCtrlOp::EpochCtrlOp(int32_t num_epoch) : RepeatOp(num_epoch) { MS_LOG(INFO) << "Welcome to Epoch Ctrl Op."; } + +// Destructor +EpochCtrlOp::~EpochCtrlOp() {} + +// A print method typically used for debugging +void EpochCtrlOp::Print(std::ostream &out, bool show_all) const { + // Always show the id and name as first line regardless if this summary or detailed print + out << "(" << std::setw(2) << operator_id_ << ") :"; + if (!show_all) { + // Call the super class for displaying any common 1-liner info + PipelineOp::Print(out, show_all); + // Then show any custom derived-internal 1-liner info for this op + out << " [epochs: " << max_repeats_ << "]\n"; + } else { + // Call the super class for displaying any common detailed info + PipelineOp::Print(out, show_all); + // Then show any custom derived-internal stuff + out << "\nCurrent epoch count: " << repeat_count_ << "\nMax epoch count: " << max_repeats_ + << "\nLeaf Nodes in execution path:"; + if (!eoe_ops_.empty()) { + for (size_t i = 0; i < eoe_ops_.size(); i++) { + out << "\n Operator: " << eoe_ops_[i]->id(); + } + } else { + out << " None."; + } + out << "\n\n"; + } +} + +Status EpochCtrlOp::GetNextBuffer(std::unique_ptr *p_buffer, int32_t worker_id, bool retry_if_eoe) { + if (child_.empty()) { + RETURN_STATUS_UNEXPECTED("EpochCtrlOp can't be the leaf node."); + } + + std::unique_ptr buf; + + // `retry_if_eoe` is false because EpochCtrlOp does not eat EOE. + RETURN_IF_NOT_OK(child_[0]->GetNextBuffer(&buf, worker_id, false)); + + // Only intercept EOE for EoeReceived processing, after that the EOE is forwarded to next op. + // Other databuffers containing data or EOF will simply be forwarded. + // EOF can simply be forwarded because this op does not spawn any thread, thus does not require clean up. + if (buf->eoe()) { + RETURN_IF_NOT_OK(EoeReceived(worker_id)); + } + + *p_buffer = std::move(buf); + return Status::OK(); +} + +Status EpochCtrlOp::EoeReceived(int32_t worker_id) { + repeat_count_++; + MS_LOG(DEBUG) << "Epoch Control operator received end of epoch. Epoch count is now: " << repeat_count_ + << ". Repeated: " << BitTest(op_ctrl_flags_, kDeOpRepeated) << ". Max epochs: " << max_repeats_; + + // If we've reached the requested epoch count, then flag the leaf nodes + // to tell them they've got one more epoch to perform. When they reach the end + // of the last epoch, they quit rather than loop again. 
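The comment above describes the heart of EpochCtrlOp::EoeReceived, and the hunk continues directly below: count completed epochs, warn the eoe-producing leaf ops one epoch before the requested total so they stop instead of looping again, and reset them whenever more epochs remain. A rough, self-contained sketch of that counting scheme follows; `EpochController` and `ToyLeaf` are made-up names standing in for EpochCtrlOp and the operators on its eoe list.

```cpp
#include <iostream>
#include <memory>
#include <utility>
#include <vector>

// Illustrative stand-in for an eoe-producing leaf operator.
struct ToyLeaf {
  bool last_pass = false;
  void SetLastPass() { last_pass = true; }  // analogous to flagging kDeOpLastRepeat
  void Reset() { std::cout << "leaf reset for next epoch\n"; }
};

class EpochController {
 public:
  EpochController(int num_epochs, std::vector<std::shared_ptr<ToyLeaf>> leaves)
      : num_epochs_(num_epochs), leaves_(std::move(leaves)) {}

  // Called once per end-of-epoch marker.
  void OnEndOfEpoch() {
    ++finished_epochs_;
    // One epoch before the requested total, tell the leaves the next pass is their last.
    if (num_epochs_ > 0 && finished_epochs_ == num_epochs_ - 1) {
      for (auto &leaf : leaves_) leaf->SetLastPass();
    }
    // If more epochs remain, drive a reset so the leaves start producing again.
    if (finished_epochs_ != num_epochs_) {
      for (auto &leaf : leaves_) leaf->Reset();
    }
  }

 private:
  int num_epochs_;
  int finished_epochs_ = 0;
  std::vector<std::shared_ptr<ToyLeaf>> leaves_;
};

int main() {
  auto leaf = std::make_shared<ToyLeaf>();
  EpochController ctrl(3, {leaf});
  for (int epoch = 0; epoch < 3; ++epoch) ctrl.OnEndOfEpoch();
  std::cout << "last_pass flag: " << std::boolalpha << leaf->last_pass << "\n";
}
```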
+ if (max_repeats_ != kInfiniteRepeat && repeat_count_ == (max_repeats_ - 1)) { + for (auto &eoe_op : eoe_ops_) { + MS_LOG(DEBUG) << "EpochCtrl setting last repeat for eoe_op: " << eoe_op->id(); + eoe_op->set_control_flag(kDeOpLastRepeat); + } + } + + // This will allow GetNextInput in DatasetOp class to pass EOE buffer instead of eating it. + state_ = OpState::kDeOpIdle; + + if (repeat_count_ != max_repeats_) { + for (auto &eoe_op : eoe_ops_) { + MS_LOG(DEBUG) << "Epoch Control driving reset to op: " << eoe_op->id(); + RETURN_IF_NOT_OK(eoe_op->Reset()); + } + } + + return Status::OK(); +} + +// Pre-Visitor accept method for NodePass +Status EpochCtrlOp::PreAccept(NodePass *p, bool *modified) { + // Downcast shared pointer then call the pre-visitation + return p->PreRunOnNode(shared_from_base(), modified); +} + +// Visitor accept method for NodePass +Status EpochCtrlOp::Accept(NodePass *p, bool *modified) { + // Downcast shared pointer then call the pre-visitation + return p->RunOnNode(shared_from_base(), modified); +} +} // namespace dataset +} // namespace mindspore diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/epoch_ctrl_op.h b/mindspore/ccsrc/minddata/dataset/engine/datasetops/epoch_ctrl_op.h new file mode 100644 index 0000000000..ed8fcb1a34 --- /dev/null +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/epoch_ctrl_op.h @@ -0,0 +1,82 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef DATASET_ENGINE_DATASETOPS_EPOCH_CTRL_OP_H_ +#define DATASET_ENGINE_DATASETOPS_EPOCH_CTRL_OP_H_ + +#include +#include +#include +#include "minddata/dataset/engine/datasetops/repeat_op.h" +#include "minddata/dataset/engine/datasetops/pipeline_op.h" + +namespace mindspore { +namespace dataset { +class EpochCtrlOp : public RepeatOp { + public: + class Builder : public RepeatOp::Builder { + public: + // Builder constructor. Creates the builder object. + // @note No default args + // @param count - The number of repeats to do + // @return This is a constructor. + explicit Builder(int32_t count) : RepeatOp::Builder(count) {} + + // Default destructor + ~Builder() = default; + + // The builder "build" method creates the final object. + // @return shared_ptr to the new EpochCtrlOp object + Status Build(std::shared_ptr *); + }; + + // Contructor + explicit EpochCtrlOp(int32_t num_epoch); + + // Destructor + ~EpochCtrlOp(); + + // A print method typically used for debugging + // @param out - The output stream to write output to + // @param show_all - A bool to control if you want to show all info or just a summary + void Print(std::ostream &out, bool show_all) const override; + + // This function returns the buffer that is at the top of our output connector. The caller is + // typically our parent node, when the parent is asking us to provide the next buffer of data. 
+ // Since EpochCtrlOp is derived from RepeatOp which is an inlined op, getting a buffer from us + // will simply bounce you to get a buffer from our child. + // Epoch Control Op does not eat the EOE, it will pass the EOE to the next op. + Status GetNextBuffer(std::unique_ptr *p_buffer, int32_t worker_id, bool retry_if_eoe) override; + + // Base-class override for handling cases when an eoe is received. + // @param worker_id - The worker id + Status EoeReceived(int32_t worker_id) override; + + /// \brief Base-class override for NodePass pre-visit acceptor + /// \param[in] p The node to visit + /// \param[out] modified Indicator if the node was modified + /// \return Status of the node visit + Status PreAccept(NodePass *p, bool *modified) override; + + /// \brief Base-class override for NodePass visitor acceptor + /// \param[in] p The node to visit + /// \param[out] modified Indicator if the node was modified + /// \return Status of the node visit + Status Accept(NodePass *p, bool *modified) override; +}; +} // namespace dataset +} // namespace mindspore + +#endif // DATASET_ENGINE_DATASETOPS_EPOCH_CTRL_OP_H_ diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/repeat_op.cc b/mindspore/ccsrc/minddata/dataset/engine/datasetops/repeat_op.cc index 6d3dc91ed3..b5319a8a71 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/repeat_op.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/repeat_op.cc @@ -132,6 +132,7 @@ Status RepeatOp::EoeReceived(int32_t worker_id) { // Invoke a reset against the eoe nodes only. for (auto &eoe_op : eoe_ops_) { + MS_LOG(DEBUG) << "Repeat operator sending reset to operator: " << eoe_op->id(); RETURN_IF_NOT_OK(eoe_op->Reset()); } @@ -167,8 +168,9 @@ int32_t RepeatOp::num_consumers() const { Status RepeatOp::Reset() { // If there's nested repeats, an ascendant repeat may have ourself listed as an eoe op. // In that case, we now have to bounce the reset down to our own eoe ops. 
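The repeat_op.cc hunk just below only adds logging, but the comment it touches names a mechanism worth spelling out: when repeats are nested, resetting an outer repeat has to bounce the reset down through every operator it tracks, which may itself be a repeat with its own tracked ops. A tiny sketch of that recursive reset, using an invented `ToyRepeat` type rather than the real RepeatOp:

```cpp
#include <iostream>
#include <memory>
#include <string>
#include <vector>

// Minimal stand-in for an operator that tracks the eoe-producing ops beneath it.
struct ToyRepeat {
  std::string name;
  std::vector<std::shared_ptr<ToyRepeat>> eoe_ops;  // nested repeats or leaves tracked by this repeat

  // Resetting a repeat bounces the reset down to everything it tracks,
  // so an outer repeat can restart a whole nested subtree for the next pass.
  void Reset() {
    std::cout << name << " got reset\n";
    for (auto &op : eoe_ops) op->Reset();
  }
};

int main() {
  auto leaf = std::make_shared<ToyRepeat>(ToyRepeat{"leaf", {}});
  auto inner = std::make_shared<ToyRepeat>(ToyRepeat{"inner repeat", {leaf}});
  auto outer = std::make_shared<ToyRepeat>(ToyRepeat{"outer repeat", {inner}});
  outer->Reset();  // prints outer, inner, then leaf
}
```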
- MS_LOG(DEBUG) << "Repeat operator (" << operator_id_ << ") reset."; + MS_LOG(DEBUG) << "Repeat operator " << operator_id_ << " got reset."; for (auto &eoe_op : eoe_ops_) { + MS_LOG(DEBUG) << "Nested repeat operator bouncing a reset to operator: " << eoe_op->id(); RETURN_IF_NOT_OK(eoe_op->Reset()); } state_ = OpState::kDeOpRunning; diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/repeat_op.h b/mindspore/ccsrc/minddata/dataset/engine/datasetops/repeat_op.h index f5259de30e..ded1f3ccd7 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/repeat_op.h +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/repeat_op.h @@ -46,7 +46,7 @@ class RepeatOp : public PipelineOp { // @return shared_ptr to the new RepeatOp object Status Build(std::shared_ptr *); - private: + protected: int32_t build_max_repeats_; Status SanityCheck() const; @@ -131,11 +131,11 @@ class RepeatOp : public PipelineOp { // @return Name of the current Op std::string Name() const override { return "RepeatOp"; } - /// \brief Adds an operator to the repeat ops list of tracked leaf/eoe nodes - /// \param[in] eoe_op The input leaf/eoe operator to add to the list + // \brief Adds an operator to the repeat ops list of tracked leaf/eoe nodes + // \param[in] eoe_op The input leaf/eoe operator to add to the list void AddToEoeList(std::shared_ptr eoe_op) { eoe_ops_.push_back(std::move(eoe_op)); } - private: + protected: int32_t max_repeats_; // The number of repeats that the user requested int32_t repeat_count_; // A counter for the current number of executed repeats std::vector> eoe_ops_; // List of operators that can generate EOE underneath this repeat. diff --git a/mindspore/ccsrc/minddata/dataset/engine/datasetops/zip_op.cc b/mindspore/ccsrc/minddata/dataset/engine/datasetops/zip_op.cc index 88019c30fc..0df6375b59 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/datasetops/zip_op.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/datasetops/zip_op.cc @@ -132,8 +132,9 @@ Status ZipOp::prepare(TensorQTable *const table) { if (eof_) { return Status::OK(); } + // One of our child iterators encounter EOE. Returns and proceed with draining phase. if (new_row.empty()) { - return Status(StatusCode::kUnexpectedError, __LINE__, __FILE__, "ZipOp prepare phase got empty row!"); + return Status::OK(); } // Pack this first row into our tensor table diff --git a/mindspore/ccsrc/minddata/dataset/engine/execution_tree.cc b/mindspore/ccsrc/minddata/dataset/engine/execution_tree.cc index 55dec24e79..16039012f9 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/execution_tree.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/execution_tree.cc @@ -23,6 +23,7 @@ #include "minddata/dataset/engine/opt/pre/removal_pass.h" #include "minddata/dataset/engine/opt/pre/cache_transform_pass.h" #include "minddata/dataset/engine/opt/post/repeat_pass.h" +#include "minddata/dataset/engine/opt/pre/injection_pass.h" #include "mindspore/ccsrc/minddata/dataset/engine/opt/optional/tensor_op_fusion_pass.h" #include "minddata/dataset/engine/perf/profiling.h" #include "minddata/dataset/engine/perf/monitor.h" @@ -50,11 +51,11 @@ Status ExecutionTree::AssociateNode(const std::shared_ptr &op) { if (op->tree_ == this) { return Status::OK(); } - if (tree_state_ != kDeTStateInit && tree_state_ != kDeTStateBuilding) { + if (tree_state_ != kDeTStateInit && tree_state_ != kDeTStateBuilding && tree_state_ != kDeTStatePrepare) { std::string err_msg = "Invalid tree state for adding a node. 
Current state: " + std::to_string(static_cast(tree_state_)) + " Expected states: " + std::to_string(static_cast(kDeTStateInit)) + " or " + - std::to_string(static_cast(kDeTStateBuilding)); + std::to_string(static_cast(kDeTStateBuilding)) + " or " + std::to_string(static_cast(kDeTStatePrepare)); RETURN_STATUS_UNEXPECTED(err_msg); } @@ -200,7 +201,9 @@ Status ExecutionTree::LaunchWorkers(int32_t num_workers, std::functionPrepareTreePreAction()); @@ -222,6 +225,7 @@ Status ExecutionTree::PrepareTreePreAction() { std::vector> pre_actions; // Construct pre actions MS_LOG(INFO) << "Running pre pass loops."; + pre_actions.push_back(std::make_unique()); pre_actions.push_back(std::make_unique()); pre_actions.push_back(std::make_unique()); // Apply pre action passes @@ -278,6 +282,11 @@ Status ExecutionTree::PrepareDeprecated() { " Expected state: " + std::to_string(static_cast(kDeTStatePrepare)); RETURN_STATUS_UNEXPECTED(err_msg); } + + if (root_ == nullptr) { + RETURN_STATUS_UNEXPECTED("Please assign one operator as the root of this tree."); + } + // Start the recursive prepare RETURN_IF_NOT_OK(this->PrepareNode(root_)); tree_state_ = kDeTStateReady; diff --git a/mindspore/ccsrc/minddata/dataset/engine/execution_tree.h b/mindspore/ccsrc/minddata/dataset/engine/execution_tree.h index b62bf8e85d..5767f35248 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/execution_tree.h +++ b/mindspore/ccsrc/minddata/dataset/engine/execution_tree.h @@ -176,7 +176,7 @@ class ExecutionTree { // For example, repeatOp inlining // // @return Status - The error code return - Status Prepare(); + Status Prepare(int num_epochs = -1); // Compulsory transformation/action pre optimization. // @return Status - The error code return @@ -193,6 +193,7 @@ class ExecutionTree { // The DEPRECATED driver of the prepare phase of the execution tree. The prepare phase will recursively // walk the tree to perform modifications to the tree or specific nodes within the tree to get // it ready for execution. + // @param Total number of epochs that will be run on this tree // @return Status - The error code return Status PrepareDeprecated(); @@ -231,6 +232,10 @@ class ExecutionTree { // Optional optimizations status bool OptimizationEnabled() const { return optimize_; } + // Getter function to get the total number of epochs to be run on this tree. 
+ // @return total number of epochs + int32_t num_epochs() { return num_epochs_; } + private: // A helper functions for doing the recursive printing // @param dataset_op - The dataset op to print @@ -245,6 +250,7 @@ class ExecutionTree { int32_t id_count_; // Counter for generating operator id's uint32_t prepare_flags_; // Flags used during tree prepare TreeState tree_state_; // Tracking the current tree state + int32_t num_epochs_; // Total number of epochs to run for this tree std::unique_ptr perf_monitor_; // Performance Monitor std::unique_ptr profiling_manager_; // Profiling manager bool optimize_; // Flag to enable optional optimizations diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/CMakeLists.txt b/mindspore/ccsrc/minddata/dataset/engine/opt/CMakeLists.txt index 0ab1fb7925..c7f861b75f 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/opt/CMakeLists.txt +++ b/mindspore/ccsrc/minddata/dataset/engine/opt/CMakeLists.txt @@ -5,6 +5,7 @@ add_library(engine-opt OBJECT post/repeat_pass.cc pre/cache_pass.cc pre/cache_transform_pass.cc + pre/injection_pass.cc pre/removal_nodes.cc pre/removal_pass.cc optional/tensor_op_fusion_pass.cc diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/pass.cc b/mindspore/ccsrc/minddata/dataset/engine/opt/pass.cc index 4a8bbaf38f..dcb81c3c32 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/opt/pass.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/opt/pass.cc @@ -16,11 +16,13 @@ #include "minddata/dataset/engine/opt/pass.h" #include "minddata/dataset/engine/datasetops/batch_op.h" +#include "minddata/dataset/engine/datasetops/build_vocab_op.h" #include "minddata/dataset/engine/datasetops/cache_op.h" #include "minddata/dataset/engine/datasetops/cache_merge_op.h" #include "minddata/dataset/engine/datasetops/cache_lookup_op.h" #include "minddata/dataset/engine/datasetops/dataset_op.h" #include "minddata/dataset/engine/datasetops/device_queue_op.h" +#include "minddata/dataset/engine/datasetops/epoch_ctrl_op.h" #include "minddata/dataset/engine/datasetops/map_op.h" #include "minddata/dataset/engine/datasetops/project_op.h" #include "minddata/dataset/engine/datasetops/rename_op.h" @@ -230,6 +232,11 @@ Status NodePass::RunOnNode(std::shared_ptr node, bool *modified) return RunOnNode(std::static_pointer_cast(node), modified); } +Status NodePass::RunOnNode(std::shared_ptr node, bool *modified) { + // Fallback to base class visitor by default + return RunOnNode(std::static_pointer_cast(node), modified); +} + Status NodePass::PreRunOnNode(std::shared_ptr node, bool *modified) { // Fallback to base class visitor by default return PreRunOnNode(std::static_pointer_cast(node), modified); @@ -244,5 +251,15 @@ Status NodePass::PreRunOnNode(std::shared_ptr node, bool *modified // Fallback to base class visitor by default return PreRunOnNode(std::static_pointer_cast(node), modified); } + +Status NodePass::PreRunOnNode(std::shared_ptr node, bool *modified) { + // Fallback to base class visitor by default + return PreRunOnNode(std::static_pointer_cast(node), modified); +} + +Status NodePass::PreRunOnNode(std::shared_ptr node, bool *modified) { + // Fallback to base class visitor by default + return PreRunOnNode(std::static_pointer_cast(node), modified); +} } // namespace dataset } // namespace mindspore diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/pass.h b/mindspore/ccsrc/minddata/dataset/engine/opt/pass.h index 845ab34d66..fa27cde6b9 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/opt/pass.h +++ 
b/mindspore/ccsrc/minddata/dataset/engine/opt/pass.h @@ -77,6 +77,10 @@ class CacheMergeOp; class CacheLookupOp; +class EpochCtrlOp; + +class BuildVocabOp; + // The base class Pass is the basic unit of tree transformation. // The actual implementation of the passes will be derived from here. class Pass : public std::enable_shared_from_this { @@ -190,12 +194,18 @@ class NodePass : public Pass { virtual Status RunOnNode(std::shared_ptr node, bool *modified); + virtual Status RunOnNode(std::shared_ptr node, bool *modified); + virtual Status PreRunOnNode(std::shared_ptr node, bool *modified); virtual Status PreRunOnNode(std::shared_ptr node, bool *modified); virtual Status PreRunOnNode(std::shared_ptr node, bool *modified); + virtual Status PreRunOnNode(std::shared_ptr node, bool *modified); + + virtual Status PreRunOnNode(std::shared_ptr node, bool *modified); + private: // Helper function to perform DFS visit Status DFSNodeVisit(std::shared_ptr node, bool *modified); diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/post/repeat_pass.cc b/mindspore/ccsrc/minddata/dataset/engine/opt/post/repeat_pass.cc index 59a3f71c53..cf8852bf44 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/opt/post/repeat_pass.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/opt/post/repeat_pass.cc @@ -20,6 +20,7 @@ #include "minddata/dataset/engine/datasetops/cache_op.h" #include "minddata/dataset/engine/datasetops/cache_lookup_op.h" #include "minddata/dataset/engine/datasetops/cache_merge_op.h" +#include "minddata/dataset/engine/datasetops/epoch_ctrl_op.h" namespace mindspore { namespace dataset { @@ -28,6 +29,9 @@ RepeatPass::RepeatPass() : is_repeated_(false), nested_repeats_(0), is_merge_(fa // Identifies the subtree below this node as being in a repeated path of the tree. Status RepeatPass::PreRunOnNode(std::shared_ptr node, bool *modified) { + // Create a new stack for eoe operators and push onto our stack of stacks. + std::unique_ptr new_stack = std::make_unique(); + eoe_op_stacks_.push(std::move(new_stack)); // If we are already repeated, then this is a nested repeat. if (is_repeated_) { nested_repeats_++; @@ -36,6 +40,18 @@ Status RepeatPass::PreRunOnNode(std::shared_ptr node, bool *modified) return Status::OK(); } +// Identifies the subtree below this node as being in a repeated path of the tree. +Status RepeatPass::PreRunOnNode(std::shared_ptr node, bool *modified) { + // EpochCtrl is derived from RepeatOp. Generally it should do the identical setup + // that RepeatOp does. However, epoch control is actually simpler because it can + // only exist as the root node so it doesn't need all the nested code. + // Create a new stack for eoe operators and push onto our stack of stacks. + std::unique_ptr new_stack = std::make_unique(); + eoe_op_stacks_.push(std::move(new_stack)); + is_repeated_ = true; + return Status::OK(); +} + // Identifies the subtree below this node as being in a cache merge path Status RepeatPass::PreRunOnNode(std::shared_ptr node, bool *modified) { // Turn on the flag that we're under a merge op @@ -47,13 +63,24 @@ Status RepeatPass::PreRunOnNode(std::shared_ptr node, bool *modifi Status RepeatPass::RunOnNode(std::shared_ptr node, bool *modified) { // Pop the leaf ops from the save-area stack and add them to the repeat op's eoe node tracking std::shared_ptr leaf_op = PopFromEOEOpStack(); + while (leaf_op != nullptr) { node->AddToEoeList(leaf_op); leaf_op = PopFromEOEOpStack(); } + // At this point, we are done with the save area stack. 
It's a unique pointer to an empty stack + // at this time, so we can pop it to get rid of it. + eoe_op_stack *current_stack = eoe_op_stacks_.top().get(); + if (!current_stack->empty()) { + RETURN_STATUS_UNEXPECTED("The eoe op stack should be empty right now!"); + } + eoe_op_stacks_.pop(); + // We are a repeat op in the descendant tree of a merge op, then we take the saved lookup up - // and add it to the list of eoe/leaf ops for the repeat, removing it from the save area. + // and add it to the list of eoe/leaf ops for the repeat. It is important that the op is removed + // from the save area, because the merge op above us may also take action on it later for a different + // case when there is no repeat in the merge leg. if (is_merge_ && cache_lookup_) { cache_lookup_->set_control_flag(DatasetOp::kDeOpRepeated); node->AddToEoeList(std::move(cache_lookup_)); @@ -65,16 +92,29 @@ Status RepeatPass::RunOnNode(std::shared_ptr node, bool *modified) { node->set_control_flag(DatasetOp::kDeOpRepeated); AddToEOEOpStack(node); nested_repeats_--; - } - - // If we are not nested, or we were the top-most repeat, now we clear the flag - if (nested_repeats_ == 0) { + } else { + // If we are not nested, or we were the top-most repeat, now we clear the flag + if (nested_repeats_ != 0) { + RETURN_STATUS_UNEXPECTED("Nested repeat counter cannot be negative!"); + } is_repeated_ = false; } return Status::OK(); } +// Hooks up any identified eoe nodes under this repeat. +Status RepeatPass::RunOnNode(std::shared_ptr node, bool *modified) { + // Pop the leaf ops from the save-area stack and add them to the eoe node tracking + std::shared_ptr leaf_op = PopFromEOEOpStack(); + while (leaf_op != nullptr) { + node->AddToEoeList(leaf_op); + leaf_op = PopFromEOEOpStack(); + } + is_repeated_ = false; + return Status::OK(); +} + // CacheOp removes previous leaf ops and replaces them with itself Status RepeatPass::RunOnNode(std::shared_ptr node, bool *modified) { if (is_repeated_) { @@ -118,9 +158,16 @@ Status RepeatPass::RunOnNode(std::shared_ptr node, bool *modified) { // Turns off the tracking for operations under merge op Status RepeatPass::RunOnNode(std::shared_ptr node, bool *modified) { // Setting the flag is needed since we didn't call the base class DatasetOp version - if (is_repeated_) node->set_control_flag(DatasetOp::kDeOpRepeated); + if (is_repeated_) { + node->set_control_flag(DatasetOp::kDeOpRepeated); + // If there was not any repeat in the merge cache miss leg, then the cache_lookup + // would not have been consumed yet. In that case, we need to assign it to the upper repeat eoe stack + if (cache_lookup_) { + AddToEOEOpStack(std::move(cache_lookup_)); + } + } + cache_lookup_.reset(); // If we are not repeated then the saved lookup is no longer needed or used is_merge_ = false; - cache_lookup_.reset(); // If a repeat op did not consume this then it's no longer needed return Status::OK(); } @@ -135,25 +182,32 @@ Status RepeatPass::RunOnNode(std::shared_ptr node, bool *modified // In this case, we naturally are a repeating leaf op so add the required setup for leafs under repeat here. if (is_repeated_) { node->set_control_flag(DatasetOp::kDeOpRepeated); - AddToEOEOpStack(node); - } else { - // save the lookup op. There could be a repeat in the cache miss leg of the merge op, in which case we - // may still need to be flagged as a repeating leaf. We can't decide that here though, so save ourself - // into the pass so that the decision can be made during the processing of the cache miss leg of the merge. 
-    cache_lookup_ = std::static_pointer_cast(node);
+    // Delay the assignment of this leaf to the eoe stack and allow the merge op processing to handle that.
   }
+
+  // save the lookup op. There could be a repeat in the cache miss leg of the merge op, in which case we
+  // may still need to be flagged as a repeating leaf. We can't decide that here though, so save ourselves
+  // into the pass so that the decision can be made during the processing of the cache miss leg of the merge.
+  // Further, if there's a repeat above the merge but no repeat in the cache miss leg, then the merge op will
+  // add the lookup to the eoe stack.
+  cache_lookup_ = std::static_pointer_cast(node);
+
   return Status::OK();
 }

 // Adds an operator to the eoe operator stack save area
-void RepeatPass::AddToEOEOpStack(std::shared_ptr dataset_op) { eoe_stack_.push(dataset_op); }
+void RepeatPass::AddToEOEOpStack(std::shared_ptr dataset_op) {
+  eoe_op_stack *current_stack = eoe_op_stacks_.top().get();
+  current_stack->push(dataset_op);
+}

 // Pops an operator from the eoe operator stack save area
 std::shared_ptr RepeatPass::PopFromEOEOpStack() {
   std::shared_ptr top_op = nullptr;
-  if (!eoe_stack_.empty()) {
-    top_op = eoe_stack_.top();
-    eoe_stack_.pop();
+  eoe_op_stack *current_stack = eoe_op_stacks_.top().get();
+  if (current_stack != nullptr && !current_stack->empty()) {
+    top_op = current_stack->top();
+    current_stack->pop();
   }
   return top_op;
 }
diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/post/repeat_pass.h b/mindspore/ccsrc/minddata/dataset/engine/opt/post/repeat_pass.h
index 9b733e2329..8ef09dff19 100644
--- a/mindspore/ccsrc/minddata/dataset/engine/opt/post/repeat_pass.h
+++ b/mindspore/ccsrc/minddata/dataset/engine/opt/post/repeat_pass.h
@@ -30,6 +30,8 @@ namespace dataset {
 /// to the eoe-producing (typically leaf) nodes underneath it.
 class RepeatPass : public NodePass {
  public:
+  using eoe_op_stack = std::stack>;
+
   /// \brief Constructor
   RepeatPass();

@@ -39,6 +41,12 @@ class RepeatPass : public NodePass {
   /// \return Status The error code return
   Status PreRunOnNode(std::shared_ptr node, bool *modified) override;

+  /// \brief Identifies the subtree below this node as being in a repeated path of the tree.
+  /// \param[in] node The node being visited
+  /// \param[inout] modified Indicator if the node was changed at all
+  /// \return Status The error code return
+  Status PreRunOnNode(std::shared_ptr node, bool *modified) override;
+
   /// \brief Identifies the subtree below this node as being in a cache merge path
   /// \param[in] node The node being visited
   /// \param[inout] modified Indicator if the node was changed at all
@@ -51,6 +59,12 @@ class RepeatPass : public NodePass {
   /// \return Status The error code return
   Status RunOnNode(std::shared_ptr node, bool *modified) override;

+  /// \brief Hooks up any identified eoe nodes under this repeat.
+ /// \param[in] node The node being visited + /// \param[inout] modified Indicator if the node was changed at all + /// \return Status The error code return + Status RunOnNode(std::shared_ptr node, bool *modified) override; + /// \brief CacheOp removes previous leaf ops and replaces them with itself /// \param[in] node The node being visited /// \param[inout] modified Indicator if the node was changed at all @@ -86,11 +100,11 @@ class RepeatPass : public NodePass { /// \return shared_ptr to the popped operator std::shared_ptr PopFromEOEOpStack(); - bool is_repeated_; // T/F if we are processing under a repeat - bool is_merge_; // T/F if we are processing under a cache merge op - int32_t nested_repeats_; // A counter for nested repeats - std::stack> eoe_stack_; // A save area for leaf/eoe ops - std::shared_ptr cache_lookup_; // A save area for a cache lookup op + bool is_repeated_; // T/F if we are processing under a repeat + bool is_merge_; // T/F if we are processing under a cache merge op + int32_t nested_repeats_; // A counter for nested repeats + std::stack> eoe_op_stacks_; // A save area for leaf/eoe ops (with nesting) + std::shared_ptr cache_lookup_; // A save area for a cache lookup op }; } // namespace dataset } // namespace mindspore diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/pre/injection_pass.cc b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/injection_pass.cc new file mode 100644 index 0000000000..9581ea2b17 --- /dev/null +++ b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/injection_pass.cc @@ -0,0 +1,82 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include "minddata/dataset/engine/opt/pre/injection_pass.h" +#include "minddata/dataset/engine/execution_tree.h" +#include "minddata/dataset/engine/datasetops/epoch_ctrl_op.h" +#include "minddata/dataset/engine/datasetops/device_queue_op.h" + +namespace mindspore { +namespace dataset { + +// constructor +InjectionPass::InjectionFinder::InjectionFinder(InjectionPass *injection_pass) : injection_pass_(injection_pass) {} + +// Performs finder work for BuildVocabOp that has special rules about epoch control injection +Status InjectionPass::InjectionFinder::PreRunOnNode(std::shared_ptr node, bool *modified) { + if (injection_pass_) { + injection_pass_->epoch_ctrl_bypass_ = true; + return Status::OK(); + } else { + RETURN_STATUS_UNEXPECTED("Missing outer injection pass object from inside InjectionFinder!"); + } +} + +// Temporary code to prevent the injection of epoch control when cache op is present +// Remove this code in cache op phase 2 +Status InjectionPass::InjectionFinder::PreRunOnNode(std::shared_ptr node, bool *modified) { + if (injection_pass_) { + injection_pass_->epoch_ctrl_bypass_ = true; + return Status::OK(); + } else { + RETURN_STATUS_UNEXPECTED("Missing outer injection pass object from inside InjectionFinder!"); + } +} + +// constructor +InjectionPass::InjectionPass() : epoch_ctrl_bypass_(false) {} + +// Runs an injection pass to inject in operators needed at the pre pass stage +Status InjectionPass::RunOnTree(ExecutionTree *tree, bool *modified) { + MS_LOG(INFO) << "Pre pass: Injection pass started."; + + // First, run the finder to perform any injection info before we can go ahead to drive the op injection work. + // The finder can make updates to the InjectionPass object. + InjectionPass::InjectionFinder finder(this); + finder.Run(tree, modified); + + // The first injection logic is to check if we should inject the epoch control op as the root node. + // Do not inject the op if the number of epochs is 1. + int32_t num_epochs = tree->num_epochs(); + if (num_epochs != 1 && !epoch_ctrl_bypass_) { + std::shared_ptr epoch_ctrl_op; + RETURN_IF_NOT_OK(EpochCtrlOp::Builder(num_epochs).Build(&epoch_ctrl_op)); + RETURN_IF_NOT_OK(tree->AssociateNode(epoch_ctrl_op)); + std::shared_ptr node = tree->root(); + if (std::dynamic_pointer_cast(node) == nullptr) { + tree->root()->InsertAsParent(epoch_ctrl_op); + } else { + tree->root()->child(0)->InsertAsParent(epoch_ctrl_op); + } + } + + MS_LOG(INFO) << "Pre pass: Injection pass complete."; + return Status::OK(); +} +} // namespace dataset +} // namespace mindspore diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/pre/injection_pass.h b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/injection_pass.h new file mode 100644 index 0000000000..eacc6bb0a9 --- /dev/null +++ b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/injection_pass.h @@ -0,0 +1,75 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef DATASET_ENGINE_OPT_PASS_PRE_INJECTION_PASS_H_
+#define DATASET_ENGINE_OPT_PASS_PRE_INJECTION_PASS_H_
+
+#include
+#include
+#include "minddata/dataset/engine/opt/pass.h"
+
+namespace mindspore {
+namespace dataset {
+
+class DatasetOp;
+
+/// \class InjectionPass injection_pass.h
+/// \brief This is a pre pass that drives the injection of any nodes that could not be directly injected from the api
+///     parsing.
+class InjectionPass : public TreePass {
+  /// \class InjectionFinder
+  /// \brief This is a nested node pass class whose job is to parse the tree and perform any identification logic for
+  ///     operators that need to be injected. It is run first by the main injection pass to find out what operators
+  ///     it may need to inject.
+  class InjectionFinder : public NodePass {
+   public:
+    /// \brief Constructor
+    explicit InjectionFinder(InjectionPass *injection_pass);
+
+    /// \brief Performs finder work for BuildVocabOp that has special rules about epoch control injection.
+    /// \param[in] node The node being visited
+    /// \param[inout] modified Indicator if the node was changed at all
+    /// \return Status The error code return
+    Status PreRunOnNode(std::shared_ptr node, bool *modified) override;
+
+    /// \brief Temporary code to prevent the injection of epoch control when cache op is present.
+    ///     Remove this code in cache op phase 2
+    /// \param[in] node The node being visited
+    /// \param[inout] modified Indicator if the node was changed at all
+    /// \return Status The error code return
+    Status PreRunOnNode(std::shared_ptr node, bool *modified) override;
+
+   private:
+    InjectionPass *injection_pass_;
+  };
+
+ public:
+  /// \brief Constructor
+  InjectionPass();
+
+  /// \brief Runs an injection pass to inject in operators needed at the pre pass stage
+  /// \param[inout] tree The tree to operate on.
+  /// \param[inout] modified Indicator if the tree was modified.
+ /// \return Status The error code return + Status RunOnTree(ExecutionTree *tree, bool *modified) override; + + private: + bool epoch_ctrl_bypass_; +}; +} // namespace dataset +} // namespace mindspore + +#endif // DATASET_ENGINE_OPT_PASS_PRE_INJECTION_PASS_H_ diff --git a/mindspore/ccsrc/minddata/dataset/engine/tdt/tdt_plugin.cc b/mindspore/ccsrc/minddata/dataset/engine/tdt/tdt_plugin.cc index 126291179a..d04f5a08d2 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/tdt/tdt_plugin.cc +++ b/mindspore/ccsrc/minddata/dataset/engine/tdt/tdt_plugin.cc @@ -29,20 +29,27 @@ std::shared_ptr TdtPlugin::GetInstance() { return instance_ptr_; } -TdtStatus TdtPlugin::hostPush(TensorRow ts_row, bool is_wait, std::string channel_name, bool profiling, int32_t &time) { +TdtStatus TdtPlugin::hostPush(TensorRow ts_row, bool is_wait, std::string channel_name, bool profiling, int32_t &time, + tdt::TdtDataType tdt_type) { MS_LOG(DEBUG) << "TDT channel name is " << channel_name << "."; std::vector items; double start_time; - auto ret = translate(ts_row, items); - if (ret != SUCCESS) { - MS_LOG(ERROR) << "TDT converting tensor failed!"; - return FAILED; + if (tdt_type == tdt::TDT_TENSOR) { + auto ret = translate(ts_row, items); + if (ret != SUCCESS) { + MS_LOG(ERROR) << "TDT converting tensor failed!"; + return FAILED; + } + } else if (tdt_type == tdt::TDT_END_OF_SEQUENCE) { + DataItem data_item; + data_item.dataType_ = tdt::TDT_END_OF_SEQUENCE; + items.emplace_back(data_item); + MS_LOG(INFO) << "TDT data type is TDT_END_OF_SEQUENCE"; } if (profiling) { start_time = ProfilingTime::GetCurMilliSecond(); } if (tdt::TdtHostPushData(channel_name, items) != 0) { - MS_LOG(ERROR) << "TDT pushing data failed!"; return FAILED; } if (profiling) { @@ -122,8 +129,8 @@ TdtStatus TdtPlugin::translate(const TensorRow &ts_row, std::vector &i data_item.dataPtr_ = std::shared_ptr(reinterpret_cast(&(*ts->begin())), [](const void *elem) {}); items.emplace_back(data_item); - MS_LOG(DEBUG) << "TDT data type is " << datatype << ", data shape is " << dataShapes << ", data length is " - << ts->Size() << "."; + MS_LOG(INFO) << "TDT data type is TDT_TENSOR, tensor type is " << datatype << ", tensor shape is " << dataShapes + << ", data length is " << ts->Size() << "."; } return SUCCESS; } diff --git a/mindspore/ccsrc/minddata/dataset/engine/tdt/tdt_plugin.h b/mindspore/ccsrc/minddata/dataset/engine/tdt/tdt_plugin.h index a7db08b7f5..275b1774fa 100644 --- a/mindspore/ccsrc/minddata/dataset/engine/tdt/tdt_plugin.h +++ b/mindspore/ccsrc/minddata/dataset/engine/tdt/tdt_plugin.h @@ -38,7 +38,8 @@ class TdtPlugin { public: static std::shared_ptr GetInstance(); - TdtStatus hostPush(TensorRow ts_row, bool is_wait, std::string channel_name, bool profilig, int32_t &time); + TdtStatus hostPush(TensorRow ts_row, bool is_wait, std::string channel_name, bool profilig, int32_t &time, + tdt::TdtDataType tdt_type = tdt::TDT_TENSOR); private: TdtPlugin() {} diff --git a/mindspore/ccsrc/pipeline/jit/pipeline.cc b/mindspore/ccsrc/pipeline/jit/pipeline.cc index 36be63387e..24d9315143 100644 --- a/mindspore/ccsrc/pipeline/jit/pipeline.cc +++ b/mindspore/ccsrc/pipeline/jit/pipeline.cc @@ -797,6 +797,9 @@ bool InitExecDataset(const std::string &queue_name, int64_t iter_num, int64_t ba (void)InitBackend(); } #endif + if (iter_num == -1) { + iter_num = INT32_MAX; + } if (name == kMsConvert || name == kMsVm) { return InitExecDatasetVm(queue_name, iter_num, batch_size, types, shapes, input_indexes, need_run); } diff --git a/mindspore/dataset/engine/datasets.py 
b/mindspore/dataset/engine/datasets.py index f3136cefae..35f3bc497f 100644
--- a/mindspore/dataset/engine/datasets.py
+++ b/mindspore/dataset/engine/datasets.py
@@ -44,7 +44,7 @@ from .validators import check_batch, check_shuffle, check_map, check_filter, che
     check_take, check_project, check_imagefolderdatasetv2, check_mnist_cifar_dataset, check_manifestdataset, \
     check_tfrecorddataset, check_vocdataset, check_cocodataset, check_celebadataset, check_minddataset, \
     check_generatordataset, check_sync_wait, check_zip_dataset, check_add_column, check_textfiledataset, check_concat, \
-    check_random_dataset, check_split, check_bucket_batch_by_length, check_cluedataset, check_positive_int32, check_save
+    check_random_dataset, check_split, check_bucket_batch_by_length, check_cluedataset, check_save
 from ..core.datatypes import mstype_to_detype, mstypelist_to_detypelist

 try:
@@ -946,14 +946,14 @@ class Dataset:
             raise TypeError("apply_func must return a dataset.")
         return dataset

-    @check_positive_int32
-    def device_que(self, prefetch_size=None):
+    def device_que(self, prefetch_size=None, send_epoch_end=True):
         """
         Return a TransferDataset that transfers data through the device.

         Args:
             prefetch_size (int, optional): prefetch number of records ahead of the user's request (default=None).
+            send_epoch_end (bool, optional): whether to send end of sequence to device or not (default=True).

         Note:
             If device is Ascend, features of data will be transferred one by one. The limitation
@@ -962,15 +962,14 @@ class Dataset:
         Return:
             TransferDataset, dataset for transferring.
         """
-        return self.to_device()
+        return self.to_device(send_epoch_end=send_epoch_end)

-    @check_positive_int32
-    def to_device(self, num_batch=None):
+    def to_device(self, send_epoch_end=True):
         """
         Transfer data through CPU, GPU or Ascend devices.

         Args:
-            num_batch (int, optional): limit the number of batch to be sent to device (default=None).
+            send_epoch_end (bool, optional): whether to send end of sequence to device or not (default=True).

         Note:
             If device is Ascend, features of data will be transferred one by one. The limitation
@@ -982,19 +981,9 @@ class Dataset:
         Raises:
             TypeError: If device_type is empty.
             ValueError: If device_type is not 'Ascend', 'GPU' or 'CPU'.
-            ValueError: If num_batch is not positive or larger than int_max.
-            ValueError: If dataset size is None or 0.
             RuntimeError: If dataset is unknown.
             RuntimeError: If distribution file path is given but failed to read.
""" - if self.get_dataset_size() is None or 0: - raise ValueError("dataset size is None or 0.") - - if num_batch is None: - num_batch = self.get_dataset_size() - repeat_count = self.get_repeat_count() - num_batch = num_batch * repeat_count - queue_name = str(uuid.uuid1()) if context: @@ -1008,9 +997,6 @@ class Dataset: if device_type not in ('Ascend', 'GPU', 'CPU'): raise ValueError("Only support CPU, Ascend, GPU") - if num_batch == 0: - raise ValueError("num_batch is 0.") - def get_distribution(output_dataset): dev_id = 0 if isinstance(output_dataset, (Cifar10Dataset, Cifar100Dataset, GeneratorDataset, ImageFolderDatasetV2, @@ -1032,7 +1018,7 @@ class Dataset: distribution_path, device_id = get_distribution(self) if distribution_path == "": - return TransferDataset(self, queue_name, device_id, device_type, num_batch) + return TransferDataset(self, queue_name, device_id, device_type, send_epoch_end) try: with open(distribution_path, 'r') as distribution_f: dist = json.load(distribution_f) @@ -1042,7 +1028,7 @@ class Dataset: except Exception: raise RuntimeError("Distribution file failed to read") - return TransferDataset(self, queue_name, device_id, device_type, num_batch) + return TransferDataset(self, queue_name, device_id, device_type, send_epoch_end) @check_save def save(self, file_name, num_files=1, file_type='mindrecord'): @@ -1072,7 +1058,7 @@ class Dataset: return SaveOp(self).save(file_names, file_type) - def create_tuple_iterator(self, columns=None): + def create_tuple_iterator(self, columns=None, num_epochs=-1): """ Create an Iterator over the dataset. The data retrieved will be a list of ndarray of data. @@ -1098,9 +1084,9 @@ class Dataset: """ if self._noop_mode(): return DummyIterator(self, 'tuple') - return TupleIterator(self, columns) + return TupleIterator(self, columns, num_epochs) - def create_dict_iterator(self): + def create_dict_iterator(self, num_epochs=-1): """ Create an Iterator over the dataset. @@ -1123,7 +1109,7 @@ class Dataset: """ if self._noop_mode(): return DummyIterator(self, 'dict') - return DictIterator(self) + return DictIterator(self, num_epochs) def __iter__(self): """Create an Iterator over the dataset.""" @@ -1149,7 +1135,7 @@ class Dataset: self._batch_size = device_iter.get_batch_size() self._num_classes = device_iter.num_classes() self._repeat_count = device_iter.get_repeat_count() - device_iter.release() + device_iter.stop() def output_shapes(self): """ @@ -2085,7 +2071,7 @@ class RepeatDataset(DatasetOp): """ child_size = self.children[0].get_dataset_size() if child_size is not None: - return child_size + return child_size * self.count return None def get_repeat_count(self): @@ -2097,7 +2083,6 @@ class RepeatDataset(DatasetOp): """ return self.count - class SkipDataset(DatasetOp): """ The result of applying Skip operator to the input Dataset. @@ -2317,10 +2302,10 @@ class TransferDataset(DatasetOp): queue_name (str): Name of device queue. device_id (int): Id of device. device_type (str): Type of device, including "CPU", "GPU", and "Ascend". - num_batch (int): limit the number of batch to be sent to device (default=None). 
+        send_epoch_end (bool, optional): Whether to send end of sequence to device or not (default=True).
     """

-    def __init__(self, input_dataset, queue_name, device_id, device_type, num_batch=None):
+    def __init__(self, input_dataset, queue_name, device_id, device_type, send_epoch_end=True):
         super().__init__()
         self.children.append(input_dataset)
         input_dataset.parent.append(self)
@@ -2328,7 +2313,7 @@ class TransferDataset(DatasetOp):
         self._input_indexs = input_dataset.input_indexs
         self._device_type = device_type
         self._device_id = device_id
-        self.__num_batch = num_batch
+        self._send_epoch_end = send_epoch_end
         self.iterator = None

     def get_args(self):
@@ -2336,13 +2321,13 @@ class TransferDataset(DatasetOp):
         args["queue_name"] = self.queue_name
         args["device_type"] = self._device_type
         args["device_id"] = self._device_id
-        args["num_batch"] = self.__num_batch
+        args["send_epoch_end"] = self._send_epoch_end
         return args

-    def create_dict_iterator(self):
+    def create_dict_iterator(self, num_epochs=-1):
         raise RuntimeError("TransferDataset is not iterable")

-    def create_tuple_iterator(self, columns=None):
+    def create_tuple_iterator(self, columns=None, num_epochs=-1):
         raise RuntimeError("TransferDataset is not iterable")

     def __iter__(self):
@@ -2354,12 +2339,14 @@ class TransferDataset(DatasetOp):
     def output_types(self):
         raise RuntimeError("TransferDataset does not support output_types")

-    def send(self):
+    def send(self, num_epochs=-1):
         # need to keep iterator alive so the executionTree is not destroyed
         if self._noop_mode():
             return
-        self.iterator = TupleIterator(self)
+        self.iterator = TupleIterator(self, num_epochs=-1)

+    def stop_send(self):
+        self.iterator.depipeline.StopSend()

 class RangeDataset(MappableDataset):
     """
diff --git a/mindspore/dataset/engine/iterators.py b/mindspore/dataset/engine/iterators.py
index 45da971840..f96e58758d 100644
--- a/mindspore/dataset/engine/iterators.py
+++ b/mindspore/dataset/engine/iterators.py
@@ -29,7 +29,6 @@ from . import datasets as de

 ITERATORS_LIST = list()

-
 def _cleanup():
     """Release all the Iterator."""
     for itr_ref in ITERATORS_LIST:
@@ -60,7 +59,6 @@ def _alter_node(node):
         node.iterator_bootstrap()
     return node

-
 class Iterator:
     """
     General Iterator over a dataset.
@@ -69,10 +67,21 @@ class Iterator:
         dataset: Dataset to be iterated over
     """

-    def __init__(self, dataset):
+    def __init__(self, dataset, num_epochs=-1):
+        self.num_epochs = num_epochs
         ITERATORS_LIST.append(weakref.ref(self))
         # create a copy of tree and work on it.
         self.dataset = copy.deepcopy(dataset)
+        self.parent_subtree = []
+
+        # If the dataset passed into the iterator is not the root of the tree,
+        # trim the tree by saving the parent subtree into self.parent_subtree and
+        # restore it after launching our c++ pipeline.
+        if self.dataset.parent:
+            logger.warning("The dataset passed in is not the root of the pipeline. Ignoring parent subtree.")
+            self.parent_subtree = self.dataset.parent
+            self.dataset.parent = []
+
         self.dataset = alter_tree(self.dataset)
         if not self.__is_tree():
             raise ValueError("The data pipeline is not a tree (i.e., one node has 2 consumers)")
@@ -83,9 +92,17 @@ class Iterator:
         root = self.__convert_node_postorder(self.dataset)
         self.depipeline.AssignRootNode(root)
-        self.depipeline.LaunchTreeExec()
+        self.depipeline.LaunchTreeExec(self.num_epochs)
         self._index = 0

+    def stop(self):
+        """
+        Manually terminate python iterator instead of relying on out of scope destruction.
+        """
+        logger.info("terminating python iterator. 
This will also terminate c++ pipeline.") + if hasattr(self, 'depipeline') and self.depipeline: + del self.depipeline + def __is_tree_node(self, node): """Check if a node is tree node.""" if not node.children: @@ -214,9 +231,14 @@ class Iterator: @abstractmethod def get_next(self): - pass + raise RuntimeError("Calling base class Iterator's get_next is invalid.") def __next__(self): + if not self.depipeline: + logger.warning("Iterator does not have a running c++ pipeline." + + "It can be because Iterator stop() had been called, or c++ pipeline crashed silently.") + raise RuntimeError("Iterator does not have a running c++ pipeline.") + data = self.get_next() if not data: if self._index == 0: @@ -293,12 +315,12 @@ class TupleIterator(Iterator): def check_node_type(self, node): pass - def __init__(self, dataset, columns=None): + def __init__(self, dataset, columns=None, num_epochs=-1): if columns is not None: if not isinstance(columns, list): columns = [columns] dataset = dataset.project(columns) - super().__init__(dataset) + super().__init__(dataset, num_epochs) def __iter__(self): return self diff --git a/mindspore/train/_utils.py b/mindspore/train/_utils.py index 85fd6fa189..5d0ae10081 100644 --- a/mindspore/train/_utils.py +++ b/mindspore/train/_utils.py @@ -57,7 +57,8 @@ def _exec_datagraph(exec_dataset, dataset_size, phase='dataset'): # transform data format dataset_types, dataset_shapes = _get_types_and_shapes(exec_dataset) - exec_dataset = exec_dataset.device_que() + send_epoch_end = bool(dataset_size == -1) + exec_dataset = exec_dataset.device_que(send_epoch_end=send_epoch_end) _executor.init_dataset(exec_dataset.queue_name, dataset_size, @@ -126,7 +127,7 @@ def _construct_tensor_list(types, shapes, batch_expand_num=1): def _to_tensor(elem, scaling_sens=None): - """Conver numpy to tensor, adapt to minddata feed solution.""" + """Convert numpy to tensor, adapt to feed the data from host solution.""" lst = [] if not isinstance(elem, (tuple, list)): elem = [elem] @@ -145,7 +146,8 @@ def _to_tensor(elem, scaling_sens=None): def _to_full_tensor(elem, device_num, global_rank, scaling_sens=None): - """Conver numpy to tensor, expanding batch dimension according to device_num, adapt to minddata feed solution.""" + """Convert numpy to tensor, expanding batch dimension according to device_num, adapt to feed the data + from host solution.""" lst = [] if not isinstance(elem, (tuple, list)): elem = [elem] diff --git a/mindspore/train/dataset_helper.py b/mindspore/train/dataset_helper.py index 75e1deabc4..ea2b8b498f 100644 --- a/mindspore/train/dataset_helper.py +++ b/mindspore/train/dataset_helper.py @@ -16,7 +16,7 @@ import math import os -from mindspore._checkparam import check_bool +from mindspore._checkparam import check_bool, check_int from .. import context from ._utils import _exec_datagraph, _get_types_and_shapes, _to_tensor, \ _construct_tensor_list, _to_full_shapes, _to_full_tensor @@ -42,17 +42,23 @@ class DatasetHelper: The iter of DatasetHelper will give one epoch data. Args: - dataset (DataSet): The dataset. - dataset_sink_mode (bool): If true use GetNext to fetch the data, or else feed the data from host. - Default: True. + dataset (DataSet): The training dataset iterator. + dataset_sink_mode (bool): If true use GetNext to fetch the data, or else feed the data from host. Default: True. + sink_size (int): Control the amount of data each sink. + If sink_size=-1, sink the complete dataset each epoch. + If sink_size>0, sink sink_size data each epoch. Default: -1. 
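In addition to the original example kept below, a minimal usage sketch of the new sink_size argument (train_dataset and network here are placeholders, not names defined by this patch):
>>> # sink 100 batches to the device per epoch, then release the data channel
>>> dataset_helper = DatasetHelper(train_dataset, dataset_sink_mode=True, sink_size=100)
>>> for inputs in dataset_helper:
>>>     outputs = network(*inputs)
>>> dataset_helper.stop_send()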
Examples: >>> dataset_helper = DatasetHelper(dataset) >>> for inputs in dataset_helper: >>> outputs = network(*inputs) """ - def __init__(self, dataset, dataset_sink_mode=True): + + def __init__(self, dataset, dataset_sink_mode=True, sink_size=-1): check_bool(dataset_sink_mode) + check_int(sink_size) + if sink_size < -1 or sink_size == 0: + raise ValueError("The sink_size must be -1 or positive, but got sink_size {}.".format(sink_size)) if dataset_sink_mode: if context.get_context("enable_ge"): @@ -68,9 +74,10 @@ class DatasetHelper: iterclass = _DatasetIterMS elif context.get_context("device_target") == "CPU": raise RuntimeError("Currently dataset sink mode is not supported when the device target is CPU.") + self.iter = iterclass(dataset, sink_size) else: - iterclass = _DatasetIterFeed - self.iter = iterclass(dataset) + iterclass = _DatasetIterNormal + self.iter = iterclass(dataset) def __iter__(self): return self.iter.__iter__() @@ -80,21 +87,26 @@ class DatasetHelper: """Get the types and shapes from dataset on current config.""" return self.iter.types_shapes() - def loop_size(self): - """Get loop_size for every iteration.""" - return self.iter.loop_size + def sink_size(self): + """Get sink_size for every iteration.""" + return self.iter.get_sink_size() + + def stop_send(self): + """Free up resources about data sink.""" + self.iter.stop_send() class _DatasetIter: - """Base iter for dataset help""" - def __init__(self, dataset): - if not hasattr(dataset, '__loop_size__'): - self.loop_size = dataset.get_dataset_size() - else: - self.loop_size = dataset.__loop_size__ + """Base iter for dataset helper""" + def __init__(self, dataset, sink_size): + self.dataset = dataset + self.sink_size = sink_size + self.sink_count = 1 - if not hasattr(dataset, '__ME_INITED__'): - dataset.__TRANSFER_DATASET__ = _exec_datagraph(dataset, self.loop_size) + if not hasattr(dataset, '__TRANSFER_DATASET__'): + if hasattr(dataset, '__loop_size__'): + self.sink_size = dataset.__loop_size__ + dataset.__TRANSFER_DATASET__ = _exec_datagraph(dataset, self.sink_size) dataset.__ME_INITED__ = dataset.__TRANSFER_DATASET__.queue_name if not hasattr(dataset, '__no_send__'): @@ -102,43 +114,70 @@ class _DatasetIter: else: _send_data(dataset) - self.ind = 0 - self.dataset = dataset - dataset_types, dataset_shapes = _get_types_and_shapes(dataset) - self.dataset_types, self.dataset_shapes = dataset_types, dataset_shapes + self.stop_send = dataset.__TRANSFER_DATASET__.stop_send + self.dataset_types, self.dataset_shapes = _get_types_and_shapes(dataset) def __iter__(self): - self.ind = 0 + self.index = 0 return self def __next__(self): - if self.ind >= self.loop_count: + if self.index >= self.sink_count: raise StopIteration() - self.ind += 1 + self.index += 1 return self.op() def types_shapes(self): return self.dataset_types, self.dataset_shapes - def get_loop_count(self, dataset): - loop_count = 1 + def get_sink_count(self, dataset): + sink_count = 1 if hasattr(dataset, '__loop_size__'): loop_size = dataset.__loop_size__ if loop_size <= dataset.get_dataset_size() and dataset.get_dataset_size() % loop_size != 0: raise ValueError(f'Dataset size {dataset.get_dataset_size()} and ' - f'loop_size {loop_size} are not matched.') - loop_count = math.ceil(dataset.get_dataset_size() / loop_size) - return loop_count + f'sink_size {loop_size} are not matched.') + sink_count = math.ceil(dataset.get_dataset_size() / loop_size) + return sink_count + + def get_sink_size(self): + """get sink_size to device""" + sink_size = 1 + if 
hasattr(self.dataset, '__loop_size__'): + sink_size = self.dataset.__loop_size__ + else: + if context.get_context("enable_ge") or context.get_context("device_target") == "Ascend": + if self.sink_size > 0: + sink_size = self.sink_size + else: + sink_size = self.dataset.get_dataset_size() + return sink_size + + +class _DatasetIterGE(_DatasetIter): + """Iter for GE.""" + def __init__(self, dataset, sink_size): + super().__init__(dataset, sink_size) + self.sink_count = self.get_sink_count(dataset) + batch_expand_num = 1 + if _need_to_full(): + batch_expand_num = _get_device_num() + tensor_list_run = _construct_tensor_list(self.dataset_types, self.dataset_shapes, batch_expand_num) + + def op(): + return tensor_list_run + + self.op = op class _DatasetIterMSLoopSink(_DatasetIter): """Iter for context (device_target=Ascend)""" - def __init__(self, dataset): - super(_DatasetIterMSLoopSink, self).__init__(dataset) - self.loop_count = self.get_loop_count(dataset) + def __init__(self, dataset, sink_size): + super().__init__(dataset, sink_size) + self.sink_count = self.get_sink_count(dataset) ms_role = os.getenv("MS_ROLE") if ms_role in ("MS_PSERVER", "MS_SCHED"): - self.loop_count = 1 + self.sink_count = 1 # for self._parallel_mode equal to semi_auto_parallel or auto_parallel, and not using full_batch, # use a complete tensor to compile, and slice tensor to run. The batch dimension of tensors for # compile is device_number times the batch dimension of tensors for run. Now only support LoopSink. @@ -153,66 +192,42 @@ class _DatasetIterMSLoopSink(_DatasetIter): class _DatasetIterMS(_DatasetIter): - """Iter for context (device_target=GPU)""" - def __init__(self, dataset): - super(_DatasetIterMS, self).__init__(dataset) - self.loop_count = dataset.get_dataset_size() - self.loop_size = 1 + """Iter for MS(enable_loop_sink=False).""" + def __init__(self, dataset, sink_size): + super().__init__(dataset, sink_size) + if sink_size > 0: + self.sink_count = sink_size + else: + self.sink_count = dataset.get_dataset_size() + queue_name = dataset.__ME_INITED__ self.op = GetNextSingleOp(self.dataset_types, self.dataset_shapes, queue_name) class _DatasetIterPSLite(_DatasetIter): """Iter for context (device_target=GPU) on MS_PSERVER or MS_SCHED""" - def __init__(self, dataset): - super(_DatasetIterPSLite, self).__init__(dataset) - self.loop_count = 1 - self.loop_size = 1 + def __init__(self, dataset, sink_size): + super().__init__(dataset, sink_size) + self.sink_count = 1 + self.sink_size = 1 self.op = None def op(): return _construct_tensor_list(self.dataset_types, self.dataset_shapes, batch_expand_num=1) self.op = op -class _DatasetIterGE(_DatasetIter): - """Iter for ge""" - def __init__(self, dataset): - super(_DatasetIterGE, self).__init__(dataset) - self.loop_count = self.get_loop_count(dataset) - batch_expand_num = 1 - if _need_to_full(): - batch_expand_num = _get_device_num() - tensor_list_run = _construct_tensor_list(self.dataset_types, self.dataset_shapes, batch_expand_num) - - def op(): - return tensor_list_run - - self.op = op - - -class _DatasetIterFeed: +class _DatasetIterNormal: """Iter for normal(non sink) mode, feed the data from host.""" def __init__(self, dataset): self.dataset = dataset self.device_num = _get_device_num() self.global_rank = _get_global_rank() - self.repeat_count = dataset.get_repeat_count() - self.repeat_ind = 0 - self.loop_count = dataset.get_dataset_size() - self.ind = 0 def __iter__(self): - if self.repeat_ind % self.repeat_count == 0: - self.iter = self.dataset.__iter__() - - 
self.repeat_ind += 1 - self.ind = 0 + self.iter = self.dataset.create_tuple_iterator() return self def __next__(self): - if self.ind >= self.loop_count: - raise StopIteration() - self.ind += 1 data = self.iter.__next__() if _need_to_full(): return _to_full_tensor(data, self.device_num, self.global_rank) diff --git a/mindspore/train/model.py b/mindspore/train/model.py index 74fd668e82..54128c66ce 100755 --- a/mindspore/train/model.py +++ b/mindspore/train/model.py @@ -21,7 +21,7 @@ import numpy as np from mindspore import log as logger from ..common.tensor import Tensor from ..nn.metrics import get_metrics -from .._checkparam import check_input_data, check_output_data, check_int_positive, check_bool +from .._checkparam import check_input_data, check_output_data, check_int_positive, check_bool, check_int from .callback import _InternalCallbackParam, RunContext, _CallbackManager from .. import context from ..parallel._utils import _get_parallel_mode, _get_device_num, _get_global_rank, \ @@ -225,7 +225,7 @@ class Model: scaling_sens /= self._device_number return scaling_sens - def _exec_preprocess(self, network, is_train, phase, dataset, dataset_sink_mode): + def _exec_preprocess(self, network, is_train, phase, dataset, dataset_sink_mode, sink_size=-1): """Initializes dataset.""" need_wrap = False if dataset_sink_mode: @@ -237,7 +237,7 @@ class Model: if not is_train: dataset.__loop_size__ = 1 - dataset_helper = DatasetHelper(dataset, dataset_sink_mode) + dataset_helper = DatasetHelper(dataset, dataset_sink_mode, sink_size) # remove later to deal with loop sink if need_wrap: @@ -317,7 +317,7 @@ class Model: self._eval_network.compile(*inputs) break - def _train(self, epoch, train_dataset, callbacks=None, dataset_sink_mode=True): + def _train(self, epoch, train_dataset, callbacks=None, dataset_sink_mode=True, sink_size=-1): """ Training. @@ -332,6 +332,7 @@ class Model: dataset_sink_mode (bool): Determines whether to pass the data through dataset channel. Default: True. Configure pynative mode, the training process will be performed with dataset not sink. + sink_size (int): Control the amount of data each sink. Default: -1. """ epoch = check_int_positive(epoch) self._train_network.set_train() @@ -342,7 +343,10 @@ class Model: cb_params = _InternalCallbackParam() cb_params.train_network = self._train_network cb_params.epoch_num = epoch - cb_params.batch_num = train_dataset.get_dataset_size() + if dataset_sink_mode and sink_size > 0: + cb_params.batch_num = sink_size + else: + cb_params.batch_num = train_dataset.get_dataset_size() cb_params.mode = "train" cb_params.loss_fn = self._loss_fn cb_params.optimizer = self._optimizer @@ -364,7 +368,7 @@ class Model: "So the training process will be performed with dataset not sink.") self._train_process(epoch, train_dataset, list_callback, cb_params) else: - self._train_dataset_sink_process(epoch, train_dataset, list_callback, cb_params) + self._train_dataset_sink_process(epoch, train_dataset, list_callback, cb_params, sink_size) @staticmethod def _transform_callbacks(callbacks): @@ -377,7 +381,7 @@ class Model: return [callbacks] - def _train_dataset_sink_process(self, epoch, train_dataset, list_callback=None, cb_params=None): + def _train_dataset_sink_process(self, epoch, train_dataset, list_callback=None, cb_params=None, sink_size=-1): """ Training process. The data would be passed to network through dataset channel. @@ -390,17 +394,18 @@ class Model: function respectively. list_callback (Callback): Executor of callback list. Default: None. 
            cb_params (_InternalCallbackParam): Callback parameters. Default: None.
+            sink_size (int): Control the amount of data each sink. Default: -1.
         """
         dataset_helper, train_network = self._exec_preprocess(self._train_network,
                                                               is_train=True,
                                                               phase='train',
                                                               dataset=train_dataset,
-                                                              dataset_sink_mode=True)
+                                                              dataset_sink_mode=True,
+                                                              sink_size=sink_size)
         self._train_network = train_network
         cb_params.train_network = self._train_network
         cb_params.cur_step_num = 0

-        loop_size = dataset_helper.loop_size()
         run_context = RunContext(cb_params)
         list_callback.begin(run_context)
@@ -412,9 +417,9 @@ class Model:
             # for data sink dataset_helper only iter once, otherwise iter epoch_size times.
             for inputs in dataset_helper:
-                cb_params.cur_step_num += loop_size
                 list_callback.step_begin(run_context)
                 outputs = self._train_network(*inputs)
+                cb_params.cur_step_num += dataset_helper.sink_size()
                 cb_params.net_outputs = outputs
                 list_callback.step_end(run_context)
@@ -422,6 +427,7 @@ class Model:
             should_stop = should_stop or run_context.get_stop_requested()
             if should_stop:
                 break
+        dataset_helper.stop_send()

         list_callback.end(run_context)
@@ -490,7 +496,7 @@ class Model:

         list_callback.end(run_context)

-    def train(self, epoch, train_dataset, callbacks=None, dataset_sink_mode=True):
+    def train(self, epoch, train_dataset, callbacks=None, dataset_sink_mode=True, sink_size=-1):
         """
         Training API where the iteration is controlled by python front-end.
@@ -515,7 +521,10 @@ class Model:
             dataset_sink_mode (bool): Determines whether to pass the data through dataset channel. Default: True.
                 Configure pynative mode, the training process will be performed with dataset not sink.
-
+            sink_size (int): Control the amount of data each sink.
+                If sink_size=-1, sink the complete dataset each epoch.
+                If sink_size>0, sink sink_size data each epoch.
+                If dataset_sink_mode is False, sink_size is invalid. Default: -1.
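Beyond the original example kept below, a hedged sketch of training with a fixed sink size (the model and dataset objects are placeholders, built as in that example):
>>> # train for 10 epochs, sinking 600 batches to the device per epoch instead of the full dataset
>>> model.train(10, dataset, dataset_sink_mode=True, sink_size=600)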
Examples: >>> dataset = get_dataset() @@ -526,17 +535,19 @@ class Model: >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None, loss_scale_manager=loss_scale_manager) >>> model.train(2, dataset) """ - repeat_count = train_dataset.get_repeat_count() - if epoch != repeat_count and dataset_sink_mode is True: - logger.warning(f"The epoch_size {epoch} is not the same with dataset repeat_count {repeat_count}") check_bool(dataset_sink_mode) + check_int(sink_size) + if sink_size < -1 or sink_size == 0: + raise ValueError("The sink_size must be -1 or positive, but got sink_size {}.".format(sink_size)) + _device_number_check(self._parallel_mode, self._device_number) _parameter_broadcast_check(self._parallel_mode, self._parameter_broadcast) self._train(epoch, train_dataset, callbacks=callbacks, - dataset_sink_mode=dataset_sink_mode) + dataset_sink_mode=dataset_sink_mode, + sink_size=sink_size) def _eval_dataset_sink_process(self, valid_dataset, list_callback=None, cb_params=None): """ diff --git a/model_zoo/alexnet/train.py b/model_zoo/alexnet/train.py index df038d62a2..4512244b92 100644 --- a/model_zoo/alexnet/train.py +++ b/model_zoo/alexnet/train.py @@ -43,7 +43,7 @@ if __name__ == "__main__": context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target) - ds_train = create_dataset_cifar10(args.data_path, cfg.batch_size, cfg.epoch_size) + ds_train = create_dataset_cifar10(args.data_path, cfg.batch_size, 1) network = AlexNet(cfg.num_classes) loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean") lr = Tensor(get_lr(0, cfg.learning_rate, cfg.epoch_size, ds_train.get_dataset_size())) diff --git a/model_zoo/bert/run_classifier.py b/model_zoo/bert/run_classifier.py index 4b2801f87c..73f41a858b 100644 --- a/model_zoo/bert/run_classifier.py +++ b/model_zoo/bert/run_classifier.py @@ -36,12 +36,11 @@ from mindspore.train.serialization import load_checkpoint, load_param_into_net _cur_dir = os.getcwd() -def do_train(dataset=None, network=None, load_checkpoint_path="", save_checkpoint_path=""): +def do_train(dataset=None, network=None, load_checkpoint_path="", save_checkpoint_path="", epoch_num=1): """ do train """ if load_checkpoint_path == "": raise ValueError("Pretrain model missed, finetune task must load pretrain model!") steps_per_epoch = dataset.get_dataset_size() - epoch_num = dataset.get_repeat_count() # optimizer if optimizer_cfg.optimizer == 'AdamWeightDecayDynamicLR': optimizer = AdamWeightDecayDynamicLR(network.trainable_params(), @@ -176,11 +175,11 @@ def run_classifier(): assessment_method=assessment_method) if args_opt.do_train.lower() == "true": - ds = create_classification_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=epoch_num, + ds = create_classification_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=1, assessment_method=assessment_method, data_file_path=args_opt.train_data_file_path, schema_file_path=args_opt.schema_file_path) - do_train(ds, netwithloss, load_pretrain_checkpoint_path, save_finetune_checkpoint_path) + do_train(ds, netwithloss, load_pretrain_checkpoint_path, save_finetune_checkpoint_path, epoch_num) if args_opt.do_eval.lower() == "true": if save_finetune_checkpoint_path == "": @@ -191,7 +190,7 @@ def run_classifier(): ds.get_dataset_size(), epoch_num, "classifier") if args_opt.do_eval.lower() == "true": - ds = create_classification_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=epoch_num, + ds = create_classification_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=1, 
assessment_method=assessment_method, data_file_path=args_opt.eval_data_file_path, schema_file_path=args_opt.schema_file_path) diff --git a/model_zoo/bert/run_ner.py b/model_zoo/bert/run_ner.py index a61c96066e..4b1a1cead7 100644 --- a/model_zoo/bert/run_ner.py +++ b/model_zoo/bert/run_ner.py @@ -38,12 +38,11 @@ from mindspore.train.serialization import load_checkpoint, load_param_into_net _cur_dir = os.getcwd() -def do_train(dataset=None, network=None, load_checkpoint_path="", save_checkpoint_path=""): +def do_train(dataset=None, network=None, load_checkpoint_path="", save_checkpoint_path="", epoch_num=1): """ do train """ if load_checkpoint_path == "": raise ValueError("Pretrain model missed, finetune task must load pretrain model!") steps_per_epoch = dataset.get_dataset_size() - epoch_num = dataset.get_repeat_count() # optimizer if optimizer_cfg.optimizer == 'AdamWeightDecayDynamicLR': optimizer = AdamWeightDecayDynamicLR(network.trainable_params(), @@ -204,10 +203,10 @@ def run_ner(): use_crf=(args_opt.use_crf.lower() == "true"), tag_to_index=tag_to_index, dropout_prob=0.1) if args_opt.do_train.lower() == "true": - ds = create_ner_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=epoch_num, + ds = create_ner_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=1, assessment_method=assessment_method, data_file_path=args_opt.train_data_file_path, schema_file_path=args_opt.schema_file_path) - do_train(ds, netwithloss, load_pretrain_checkpoint_path, save_finetune_checkpoint_path) + do_train(ds, netwithloss, load_pretrain_checkpoint_path, save_finetune_checkpoint_path, epoch_num) if args_opt.do_eval.lower() == "true": if save_finetune_checkpoint_path == "": @@ -218,7 +217,7 @@ def run_ner(): ds.get_dataset_size(), epoch_num, "ner") if args_opt.do_eval.lower() == "true": - ds = create_ner_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=epoch_num, + ds = create_ner_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=1, assessment_method=assessment_method, data_file_path=args_opt.eval_data_file_path, schema_file_path=args_opt.schema_file_path) do_eval(ds, BertNER, args_opt.use_crf, number_labels, assessment_method, args_opt.eval_data_file_path, diff --git a/model_zoo/bert/run_pretrain.py b/model_zoo/bert/run_pretrain.py index 7123c942f3..54769c011b 100644 --- a/model_zoo/bert/run_pretrain.py +++ b/model_zoo/bert/run_pretrain.py @@ -100,11 +100,12 @@ def run_pretrain(): bert_net_cfg.compute_type = mstype.float32 - ds, new_repeat_count = create_bert_dataset(args_opt.epoch_size, device_num, rank, args_opt.do_shuffle, - args_opt.enable_data_sink, args_opt.data_sink_steps, - args_opt.data_dir, args_opt.schema_dir) + ds = create_bert_dataset(1, device_num, rank, args_opt.do_shuffle, + args_opt.enable_data_sink, args_opt.data_sink_steps, + args_opt.data_dir, args_opt.schema_dir) + new_repeat_count = args_opt.epoch_size if args_opt.train_steps > 0: - new_repeat_count = min(new_repeat_count, args_opt.train_steps // args_opt.data_sink_steps) + new_repeat_count = min(args_opt.epoch_size, args_opt.train_steps // args_opt.data_sink_steps) netwithloss = BertNetworkWithLoss(bert_net_cfg, True) if cfg.optimizer == 'Lamb': diff --git a/model_zoo/bert/run_squad.py b/model_zoo/bert/run_squad.py index 083cedac1d..55fa2a0fc3 100644 --- a/model_zoo/bert/run_squad.py +++ b/model_zoo/bert/run_squad.py @@ -38,12 +38,11 @@ from mindspore.train.serialization import load_checkpoint, load_param_into_net _cur_dir = os.getcwd() -def do_train(dataset=None, network=None, load_checkpoint_path="", 
save_checkpoint_path=""): +def do_train(dataset=None, network=None, load_checkpoint_path="", save_checkpoint_path="", epoch_num=1): """ do train """ if load_checkpoint_path == "": raise ValueError("Pretrain model missed, finetune task must load pretrain model!") steps_per_epoch = dataset.get_dataset_size() - epoch_num = dataset.get_repeat_count() # optimizer if optimizer_cfg.optimizer == 'AdamWeightDecayDynamicLR': optimizer = AdamWeightDecayDynamicLR(network.trainable_params(), @@ -181,10 +180,10 @@ def run_squad(): netwithloss = BertSquad(bert_net_cfg, True, 2, dropout_prob=0.1) if args_opt.do_train.lower() == "true": - ds = create_squad_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=epoch_num, + ds = create_squad_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=1, data_file_path=args_opt.train_data_file_path, schema_file_path=args_opt.schema_file_path) - do_train(ds, netwithloss, load_pretrain_checkpoint_path, save_finetune_checkpoint_path) + do_train(ds, netwithloss, load_pretrain_checkpoint_path, save_finetune_checkpoint_path, epoch_num) if args_opt.do_eval.lower() == "true": if save_finetune_checkpoint_path == "": load_finetune_checkpoint_dir = _cur_dir @@ -194,7 +193,7 @@ def run_squad(): ds.get_dataset_size(), epoch_num, "squad") if args_opt.do_eval.lower() == "true": - ds = create_squad_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=epoch_num, + ds = create_squad_dataset(batch_size=bert_net_cfg.batch_size, repeat_count=1, data_file_path=args_opt.eval_data_file_path, schema_file_path=args_opt.schema_file_path, is_training=False) do_eval(ds, args_opt.vocab_file_path, args_opt.eval_json_path, diff --git a/model_zoo/bert/src/dataset.py b/model_zoo/bert/src/dataset.py index e530718d4f..097b2c1e89 100644 --- a/model_zoo/bert/src/dataset.py +++ b/model_zoo/bert/src/dataset.py @@ -54,7 +54,6 @@ def create_bert_dataset(epoch_size=1, device_num=1, rank=0, do_shuffle="true", e ds = ds.map(input_columns="input_ids", operations=type_cast_op) # apply batch operations ds = ds.batch(bert_net_cfg.batch_size, drop_remainder=True) - ds = ds.repeat(max(new_repeat_count, repeat_count)) logger.info("data size: {}".format(ds.get_dataset_size())) logger.info("repeatcount: {}".format(ds.get_repeat_count())) return ds, new_repeat_count diff --git a/model_zoo/deepfm/train.py b/model_zoo/deepfm/train.py index 228d04c0d3..ff110cd5ab 100644 --- a/model_zoo/deepfm/train.py +++ b/model_zoo/deepfm/train.py @@ -57,7 +57,7 @@ if __name__ == '__main__': ds_train = create_dataset(args_opt.dataset_path, train_mode=True, - epochs=train_config.train_epochs, + epochs=1, batch_size=train_config.batch_size, data_type=DataType(data_config.data_format), rank_size=rank_size, @@ -82,7 +82,7 @@ if __name__ == '__main__': if args_opt.do_eval: ds_eval = create_dataset(args_opt.dataset_path, train_mode=False, - epochs=train_config.train_epochs, + epochs=1, batch_size=train_config.batch_size, data_type=DataType(data_config.data_format)) eval_callback = EvalCallBack(model, ds_eval, auc_metric, diff --git a/model_zoo/deeplabv3/train.py b/model_zoo/deeplabv3/train.py index d096613977..39d50e51cc 100644 --- a/model_zoo/deeplabv3/train.py +++ b/model_zoo/deeplabv3/train.py @@ -66,7 +66,7 @@ if __name__ == "__main__": init() args_opt.base_size = config.crop_size args_opt.crop_size = config.crop_size - train_dataset = create_dataset(args_opt, args_opt.data_url, config.epoch_size, config.batch_size, usage="train") + train_dataset = create_dataset(args_opt, args_opt.data_url, 1, config.batch_size, usage="train") 
dataset_size = train_dataset.get_dataset_size() time_cb = TimeMonitor(data_size=dataset_size) callback = [time_cb, LossCallBack()] diff --git a/model_zoo/faster_rcnn/train.py b/model_zoo/faster_rcnn/train.py index 7d5f190bab..2e6a35be6d 100644 --- a/model_zoo/faster_rcnn/train.py +++ b/model_zoo/faster_rcnn/train.py @@ -94,7 +94,7 @@ if __name__ == '__main__': loss_scale = float(config.loss_scale) # When create MindDataset, using the fitst mindrecord file, such as FasterRcnn.mindrecord0. - dataset = create_fasterrcnn_dataset(mindrecord_file, repeat_num=config.epoch_size, + dataset = create_fasterrcnn_dataset(mindrecord_file, repeat_num=1, batch_size=config.batch_size, device_num=device_num, rank_id=rank) dataset_size = dataset.get_dataset_size() diff --git a/model_zoo/googlenet/train.py b/model_zoo/googlenet/train.py index 0129176510..a7c72e2763 100644 --- a/model_zoo/googlenet/train.py +++ b/model_zoo/googlenet/train.py @@ -78,7 +78,7 @@ if __name__ == '__main__': mirror_mean=True) init() - dataset = create_dataset(cfg.data_path, cfg.epoch_size) + dataset = create_dataset(cfg.data_path, 1) batch_num = dataset.get_dataset_size() net = GoogleNet(num_classes=cfg.num_classes) diff --git a/model_zoo/lenet/train.py b/model_zoo/lenet/train.py index 740b6e8ca3..2282842188 100644 --- a/model_zoo/lenet/train.py +++ b/model_zoo/lenet/train.py @@ -45,8 +45,7 @@ if __name__ == "__main__": context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target) ds_train = create_dataset(os.path.join(args.data_path, "train"), - cfg.batch_size, - cfg.epoch_size) + cfg.batch_size) network = LeNet5(cfg.num_classes) net_loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean") diff --git a/model_zoo/lenet_quant/train.py b/model_zoo/lenet_quant/train.py index 03e9ff62bd..66546b15c0 100644 --- a/model_zoo/lenet_quant/train.py +++ b/model_zoo/lenet_quant/train.py @@ -44,7 +44,7 @@ args = parser.parse_args() if __name__ == "__main__": context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target) - ds_train = create_dataset(os.path.join(args.data_path, "train"), cfg.batch_size, cfg.epoch_size) + ds_train = create_dataset(os.path.join(args.data_path, "train"), cfg.batch_size, 1) step_size = ds_train.get_dataset_size() # define fusion network diff --git a/model_zoo/lstm/train.py b/model_zoo/lstm/train.py index 51ae12c685..53c3a89a6a 100644 --- a/model_zoo/lstm/train.py +++ b/model_zoo/lstm/train.py @@ -77,7 +77,7 @@ if __name__ == '__main__': model = Model(network, loss, opt, {'acc': Accuracy()}) print("============== Starting Training ==============") - ds_train = lstm_create_dataset(args.preprocess_path, cfg.batch_size, cfg.num_epochs) + ds_train = lstm_create_dataset(args.preprocess_path, cfg.batch_size, 1) config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps, keep_checkpoint_max=cfg.keep_checkpoint_max) ckpoint_cb = ModelCheckpoint(prefix="lstm", directory=args.ckpt_path, config=config_ck) diff --git a/model_zoo/mass/train.py b/model_zoo/mass/train.py index b58075ba4e..4d297aa518 100644 --- a/model_zoo/mass/train.py +++ b/model_zoo/mass/train.py @@ -249,7 +249,7 @@ def train_parallel(config: TransformerConfig): pre_train_dataset = load_dataset( data_files=config.pre_train_dataset, - batch_size=config.batch_size, epoch_count=config.epochs, + batch_size=config.batch_size, epoch_count=1, sink_mode=config.dataset_sink_mode, sink_step=config.dataset_sink_step, rank_size=MultiAscend.get_group_size(), @@ -257,7 +257,7 @@ def 
train_parallel(config: TransformerConfig): ) if config.pre_train_dataset else None fine_tune_dataset = load_dataset( data_files=config.fine_tune_dataset, - batch_size=config.batch_size, epoch_count=config.epochs, + batch_size=config.batch_size, epoch_count=1, sink_mode=config.dataset_sink_mode, sink_step=config.dataset_sink_step, rank_size=MultiAscend.get_group_size(), @@ -265,7 +265,7 @@ def train_parallel(config: TransformerConfig): ) if config.fine_tune_dataset else None test_dataset = load_dataset( data_files=config.test_dataset, - batch_size=config.batch_size, epoch_count=config.epochs, + batch_size=config.batch_size, epoch_count=1, sink_mode=config.dataset_sink_mode, sink_step=config.dataset_sink_step, rank_size=MultiAscend.get_group_size(), @@ -288,17 +288,17 @@ def train_single(config: TransformerConfig): print(" | Starting training on single device.") pre_train_dataset = load_dataset(data_files=config.pre_train_dataset, batch_size=config.batch_size, - epoch_count=config.epochs, + epoch_count=1, sink_mode=config.dataset_sink_mode, sink_step=config.dataset_sink_step) if config.pre_train_dataset else None fine_tune_dataset = load_dataset(data_files=config.fine_tune_dataset, batch_size=config.batch_size, - epoch_count=config.epochs, + epoch_count=1, sink_mode=config.dataset_sink_mode, sink_step=config.dataset_sink_step) if config.fine_tune_dataset else None test_dataset = load_dataset(data_files=config.test_dataset, batch_size=config.batch_size, - epoch_count=config.epochs, + epoch_count=1, sink_mode=config.dataset_sink_mode, sink_step=config.dataset_sink_step) if config.test_dataset else None diff --git a/model_zoo/mobilenetv2/train.py b/model_zoo/mobilenetv2/train.py index 4ae743f540..736933eb31 100644 --- a/model_zoo/mobilenetv2/train.py +++ b/model_zoo/mobilenetv2/train.py @@ -180,7 +180,7 @@ if __name__ == '__main__': do_train=True, config=config_gpu, platform=args_opt.platform, - repeat_num=epoch_size, + repeat_num=1, batch_size=config_gpu.batch_size) step_size = dataset.get_dataset_size() # resume @@ -239,7 +239,7 @@ if __name__ == '__main__': do_train=True, config=config_ascend, platform=args_opt.platform, - repeat_num=epoch_size, + repeat_num=1, batch_size=config_ascend.batch_size) step_size = dataset.get_dataset_size() if args_opt.pre_trained: diff --git a/model_zoo/mobilenetv2_quant/train.py b/model_zoo/mobilenetv2_quant/train.py index 1302c3cf27..0b745b50c5 100644 --- a/model_zoo/mobilenetv2_quant/train.py +++ b/model_zoo/mobilenetv2_quant/train.py @@ -86,7 +86,7 @@ if __name__ == '__main__': do_train=True, config=config, device_target=args_opt.device_target, - repeat_num=epoch_size, + repeat_num=1, batch_size=config.batch_size) step_size = dataset.get_dataset_size() # load pre trained ckpt diff --git a/model_zoo/mobilenetv3/train.py b/model_zoo/mobilenetv3/train.py index 57199ec1a7..5f2a3502ac 100644 --- a/model_zoo/mobilenetv3/train.py +++ b/model_zoo/mobilenetv3/train.py @@ -181,7 +181,7 @@ if __name__ == '__main__': do_train=True, config=config_gpu, platform=args_opt.platform, - repeat_num=epoch_size, + repeat_num=1, batch_size=config_gpu.batch_size) step_size = dataset.get_dataset_size() # resume @@ -240,7 +240,7 @@ if __name__ == '__main__': do_train=True, config=config_ascend, platform=args_opt.platform, - repeat_num=epoch_size, + repeat_num=1, batch_size=config_ascend.batch_size) step_size = dataset.get_dataset_size() if args_opt.pre_trained: diff --git a/model_zoo/official/nlp/transformer/src/dataset.py b/model_zoo/official/nlp/transformer/src/dataset.py index 
5b006046a5..84dc5427b2 100644 --- a/model_zoo/official/nlp/transformer/src/dataset.py +++ b/model_zoo/official/nlp/transformer/src/dataset.py @@ -17,7 +17,6 @@ import mindspore.common.dtype as mstype import mindspore.dataset.engine.datasets as de import mindspore.dataset.transforms.c_transforms as deC -from mindspore import log as logger from .config import transformer_net_cfg def create_transformer_dataset(epoch_count=1, rank_size=1, rank_id=0, do_shuffle="true", enable_data_sink="true", @@ -42,7 +41,4 @@ def create_transformer_dataset(epoch_count=1, rank_size=1, rank_id=0, do_shuffle ds = ds.batch(transformer_net_cfg.batch_size, drop_remainder=True) ds = ds.repeat(repeat_count) - ds.channel_name = 'transformer' - logger.info("data size: {}".format(ds.get_dataset_size())) - logger.info("repeatcount: {}".format(ds.get_repeat_count())) - return ds, repeat_count + return ds diff --git a/model_zoo/official/nlp/transformer/train.py b/model_zoo/official/nlp/transformer/train.py index ffd6b8c714..f84c4214e3 100644 --- a/model_zoo/official/nlp/transformer/train.py +++ b/model_zoo/official/nlp/transformer/train.py @@ -125,10 +125,10 @@ def run_transformer_train(): else: device_num = 1 rank_id = 0 - dataset, repeat_count = create_transformer_dataset(epoch_count=args.epoch_size, rank_size=device_num, - rank_id=rank_id, do_shuffle=args.do_shuffle, - enable_data_sink=args.enable_data_sink, - dataset_path=args.data_path) + dataset = create_transformer_dataset(epoch_count=1, rank_size=device_num, + rank_id=rank_id, do_shuffle=args.do_shuffle, + enable_data_sink=args.enable_data_sink, + dataset_path=args.data_path) netwithloss = TransformerNetworkWithLoss(transformer_net_cfg, True) @@ -165,7 +165,7 @@ def run_transformer_train(): netwithgrads.set_train(True) model = Model(netwithgrads) - model.train(repeat_count, dataset, callbacks=callbacks, dataset_sink_mode=(args.enable_data_sink == "true")) + model.train(args.epoch_size, dataset, callbacks=callbacks, dataset_sink_mode=(args.enable_data_sink == "true")) if __name__ == '__main__': run_transformer_train() diff --git a/model_zoo/resnet/train.py b/model_zoo/resnet/train.py index 89ce62d733..7344ff0875 100755 --- a/model_zoo/resnet/train.py +++ b/model_zoo/resnet/train.py @@ -88,10 +88,10 @@ if __name__ == '__main__': # create dataset if args_opt.net == "resnet50": - dataset = create_dataset(dataset_path=args_opt.dataset_path, do_train=True, repeat_num=config.epoch_size, + dataset = create_dataset(dataset_path=args_opt.dataset_path, do_train=True, repeat_num=1, batch_size=config.batch_size, target=target) else: - dataset = create_dataset(dataset_path=args_opt.dataset_path, do_train=True, repeat_num=config.epoch_size, + dataset = create_dataset(dataset_path=args_opt.dataset_path, do_train=True, repeat_num=1, batch_size=config.batch_size) step_size = dataset.get_dataset_size() diff --git a/model_zoo/resnet_thor/train.py b/model_zoo/resnet_thor/train.py index 47f56a0676..b6a84fe136 100644 --- a/model_zoo/resnet_thor/train.py +++ b/model_zoo/resnet_thor/train.py @@ -105,7 +105,7 @@ if __name__ == '__main__': loss = CrossEntropy(smooth_factor=config.label_smooth_factor, num_classes=config.class_num) if args_opt.do_train: dataset = create_dataset(dataset_path=args_opt.dataset_path, do_train=True, - repeat_num=epoch_size, batch_size=config.batch_size) + batch_size=config.batch_size) step_size = dataset.get_dataset_size() loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) diff --git a/model_zoo/ssd/train.py b/model_zoo/ssd/train.py 
index 27f0e7ad0f..a0969a06fd 100644 --- a/model_zoo/ssd/train.py +++ b/model_zoo/ssd/train.py @@ -91,7 +91,7 @@ def main(): loss_scale = float(args_opt.loss_scale) # When create MindDataset, using the fitst mindrecord file, such as ssd.mindrecord0. - dataset = create_ssd_dataset(mindrecord_file, repeat_num=args_opt.epoch_size, + dataset = create_ssd_dataset(mindrecord_file, repeat_num=1, batch_size=args_opt.batch_size, device_num=device_num, rank=rank) dataset_size = dataset.get_dataset_size() diff --git a/model_zoo/vgg16/train.py b/model_zoo/vgg16/train.py index 33a4f0310c..5195476b97 100644 --- a/model_zoo/vgg16/train.py +++ b/model_zoo/vgg16/train.py @@ -83,7 +83,7 @@ if __name__ == '__main__': mirror_mean=True) init() - dataset = vgg_create_dataset(args_opt.data_path, cfg.epoch_size) + dataset = vgg_create_dataset(args_opt.data_path, 1) batch_num = dataset.get_dataset_size() net = vgg16(num_classes=cfg.num_classes) diff --git a/model_zoo/wide_and_deep/train.py b/model_zoo/wide_and_deep/train.py index a043be3dc6..0d76b5d1a4 100644 --- a/model_zoo/wide_and_deep/train.py +++ b/model_zoo/wide_and_deep/train.py @@ -63,7 +63,7 @@ def test_train(configure): data_path = configure.data_path batch_size = configure.batch_size epochs = configure.epochs - ds_train = create_dataset(data_path, train_mode=True, epochs=epochs, batch_size=batch_size) + ds_train = create_dataset(data_path, train_mode=True, epochs=1, batch_size=batch_size) print("ds_train.size: {}".format(ds_train.get_dataset_size())) net_builder = ModelBuilder() diff --git a/model_zoo/wide_and_deep/train_and_eval.py b/model_zoo/wide_and_deep/train_and_eval.py index e0ab6b2e9e..779ef92b4f 100644 --- a/model_zoo/wide_and_deep/train_and_eval.py +++ b/model_zoo/wide_and_deep/train_and_eval.py @@ -67,8 +67,8 @@ def test_train_eval(config): data_path = config.data_path batch_size = config.batch_size epochs = config.epochs - ds_train = create_dataset(data_path, train_mode=True, epochs=epochs, batch_size=batch_size) - ds_eval = create_dataset(data_path, train_mode=False, epochs=epochs + 1, batch_size=batch_size) + ds_train = create_dataset(data_path, train_mode=True, epochs=1, batch_size=batch_size) + ds_eval = create_dataset(data_path, train_mode=False, epochs=1, batch_size=batch_size) print("ds_train.size: {}".format(ds_train.get_dataset_size())) print("ds_eval.size: {}".format(ds_eval.get_dataset_size())) diff --git a/model_zoo/wide_and_deep/train_and_eval_auto_parallel.py b/model_zoo/wide_and_deep/train_and_eval_auto_parallel.py index 4c86931b2e..6d36bf9af4 100644 --- a/model_zoo/wide_and_deep/train_and_eval_auto_parallel.py +++ b/model_zoo/wide_and_deep/train_and_eval_auto_parallel.py @@ -85,14 +85,14 @@ def train_and_eval(config): if config.full_batch: context.set_auto_parallel_context(full_batch=True) de.config.set_seed(1) - ds_train = create_dataset(data_path, train_mode=True, epochs=epochs, + ds_train = create_dataset(data_path, train_mode=True, epochs=1, batch_size=batch_size*get_group_size()) - ds_eval = create_dataset(data_path, train_mode=False, epochs=epochs + 1, + ds_eval = create_dataset(data_path, train_mode=False, epochs=1, batch_size=batch_size*get_group_size()) else: - ds_train = create_dataset(data_path, train_mode=True, epochs=epochs, + ds_train = create_dataset(data_path, train_mode=True, epochs=1, batch_size=batch_size, rank_id=get_rank(), rank_size=get_group_size()) - ds_eval = create_dataset(data_path, train_mode=False, epochs=epochs + 1, + ds_eval = create_dataset(data_path, train_mode=False, epochs=1, 
batch_size=batch_size, rank_id=get_rank(), rank_size=get_group_size()) print("ds_train.size: {}".format(ds_train.get_dataset_size())) print("ds_eval.size: {}".format(ds_eval.get_dataset_size())) diff --git a/model_zoo/wide_and_deep/train_and_eval_distribute.py b/model_zoo/wide_and_deep/train_and_eval_distribute.py index 71f2b11cba..e85c9186b9 100644 --- a/model_zoo/wide_and_deep/train_and_eval_distribute.py +++ b/model_zoo/wide_and_deep/train_and_eval_distribute.py @@ -74,9 +74,9 @@ def train_and_eval(config): batch_size = config.batch_size epochs = config.epochs print("epochs is {}".format(epochs)) - ds_train = create_dataset(data_path, train_mode=True, epochs=epochs, + ds_train = create_dataset(data_path, train_mode=True, epochs=1, batch_size=batch_size, rank_id=get_rank(), rank_size=get_group_size()) - ds_eval = create_dataset(data_path, train_mode=False, epochs=epochs + 1, + ds_eval = create_dataset(data_path, train_mode=False, epochs=1, batch_size=batch_size, rank_id=get_rank(), rank_size=get_group_size()) print("ds_train.size: {}".format(ds_train.get_dataset_size())) print("ds_eval.size: {}".format(ds_eval.get_dataset_size())) diff --git a/model_zoo/yolov3_resnet18/train.py b/model_zoo/yolov3_resnet18/train.py index 0a15066ed3..6a139627cf 100644 --- a/model_zoo/yolov3_resnet18/train.py +++ b/model_zoo/yolov3_resnet18/train.py @@ -121,7 +121,7 @@ def main(): loss_scale = float(args_opt.loss_scale) # When create MindDataset, using the fitst mindrecord file, such as yolo.mindrecord0. - dataset = create_yolo_dataset(mindrecord_file, repeat_num=args_opt.epoch_size, + dataset = create_yolo_dataset(mindrecord_file, batch_size=args_opt.batch_size, device_num=device_num, rank=rank) dataset_size = dataset.get_dataset_size() print("Create dataset done!") diff --git a/tests/dataset_mock.py b/tests/dataset_mock.py index 237e38a9d3..928c01c636 100644 --- a/tests/dataset_mock.py +++ b/tests/dataset_mock.py @@ -50,13 +50,20 @@ class MindData: def input_indexs(self): return self._input_indexs - def device_que(self): + def device_que(self, send_epoch_end=True): self.queue_name = '6ba41974-209e-11ea-88b0-a24efeb2c736' + self.send_epoch_end = send_epoch_end return self + def create_tuple_iterator(self): + return self.__iter__() + def send(self): pass + def stop_send(self): + pass + def __len__(self): return self._size diff --git a/tests/st/model_zoo_tests/deeplabv3/train_one_epoch_with_loss.py b/tests/st/model_zoo_tests/deeplabv3/train_one_epoch_with_loss.py index 73931a8046..878ab37812 100644 --- a/tests/st/model_zoo_tests/deeplabv3/train_one_epoch_with_loss.py +++ b/tests/st/model_zoo_tests/deeplabv3/train_one_epoch_with_loss.py @@ -73,7 +73,7 @@ if __name__ == "__main__": epoch_size = 3 args_opt.base_size = config.crop_size args_opt.crop_size = config.crop_size - train_dataset = create_dataset(args_opt, args_opt.data_url, epoch_size, config.batch_size, + train_dataset = create_dataset(args_opt, args_opt.data_url, 1, config.batch_size, usage="train", shuffle=False) dataset_size = train_dataset.get_dataset_size() callback = LossCallBack(dataset_size) diff --git a/tests/st/model_zoo_tests/transformer/test_transformer.py b/tests/st/model_zoo_tests/transformer/test_transformer.py index d36c6d9dcb..5e413edf74 100644 --- a/tests/st/model_zoo_tests/transformer/test_transformer.py +++ b/tests/st/model_zoo_tests/transformer/test_transformer.py @@ -120,10 +120,10 @@ def test_transformer(): batch_size = 96 epoch_size = 3 config = get_config(version=version, batch_size=batch_size) - dataset, repeat_count = 
create_transformer_dataset(epoch_count=epoch_size, - do_shuffle="false", - enable_data_sink="false", - dataset_path=DATA_DIR) + dataset = create_transformer_dataset(epoch_count=1, + do_shuffle="false", + enable_data_sink="false", + dataset_path=DATA_DIR) netwithloss = TransformerNetworkWithLoss(config, True) @@ -146,7 +146,7 @@ def test_transformer(): netwithgrads.set_train(True) time_monitor_callback = TimeMonitor(dataset.get_dataset_size()) model = Model(netwithgrads) - model.train(repeat_count, dataset, callbacks=[time_monitor_callback, callback], dataset_sink_mode=False) + model.train(epoch_size, dataset, callbacks=[time_monitor_callback, callback], dataset_sink_mode=False) # assertion occurs while the loss value, overflow state or loss_scale value is wrong loss_value = np.array(callback.loss_list) diff --git a/tests/st/model_zoo_tests/wide_and_deep/python_file_for_ci/train_and_test_multinpu_ci.py b/tests/st/model_zoo_tests/wide_and_deep/python_file_for_ci/train_and_test_multinpu_ci.py index 0aca7d1e75..930d7d6aaa 100644 --- a/tests/st/model_zoo_tests/wide_and_deep/python_file_for_ci/train_and_test_multinpu_ci.py +++ b/tests/st/model_zoo_tests/wide_and_deep/python_file_for_ci/train_and_test_multinpu_ci.py @@ -79,9 +79,9 @@ def test_train_eval(): batch_size = config.batch_size epochs = config.epochs print("epochs is {}".format(epochs)) - ds_train = create_dataset(data_path, train_mode=True, epochs=epochs, batch_size=batch_size, + ds_train = create_dataset(data_path, train_mode=True, epochs=1, batch_size=batch_size, data_type=DataType.MINDRECORD, rank_id=get_rank(), rank_size=get_group_size()) - ds_eval = create_dataset(data_path, train_mode=False, epochs=epochs + 1, batch_size=batch_size, + ds_eval = create_dataset(data_path, train_mode=False, epochs=1, batch_size=batch_size, data_type=DataType.MINDRECORD, rank_id=get_rank(), rank_size=get_group_size()) print("ds_train.size: {}".format(ds_train.get_dataset_size())) print("ds_eval.size: {}".format(ds_eval.get_dataset_size())) diff --git a/tests/st/model_zoo_tests/wide_and_deep/train_and_test_multinpu_ci_data_parallel.py b/tests/st/model_zoo_tests/wide_and_deep/train_and_test_multinpu_ci_data_parallel.py index e39562c92f..0f909b0236 100644 --- a/tests/st/model_zoo_tests/wide_and_deep/train_and_test_multinpu_ci_data_parallel.py +++ b/tests/st/model_zoo_tests/wide_and_deep/train_and_test_multinpu_ci_data_parallel.py @@ -76,9 +76,9 @@ def test_train_eval(): batch_size = config.batch_size epochs = config.epochs print("epochs is {}".format(epochs)) - ds_train = create_dataset(data_path, train_mode=True, epochs=epochs, + ds_train = create_dataset(data_path, train_mode=True, epochs=1, batch_size=batch_size, rank_id=get_rank(), rank_size=get_group_size()) - ds_eval = create_dataset(data_path, train_mode=False, epochs=epochs + 1, + ds_eval = create_dataset(data_path, train_mode=False, epochs=1, batch_size=batch_size, rank_id=get_rank(), rank_size=get_group_size()) print("ds_train.size: {}".format(ds_train.get_dataset_size())) print("ds_eval.size: {}".format(ds_eval.get_dataset_size())) diff --git a/tests/st/model_zoo_tests/yolov3/test_yolov3.py b/tests/st/model_zoo_tests/yolov3/test_yolov3.py index 126c66a6f3..25f15d1799 100644 --- a/tests/st/model_zoo_tests/yolov3/test_yolov3.py +++ b/tests/st/model_zoo_tests/yolov3/test_yolov3.py @@ -113,7 +113,7 @@ def test_yolov3(): loss_scale = float(loss_scale) # When create MindDataset, using the fitst mindrecord file, such as yolo.mindrecord0. 
- dataset = create_yolo_dataset(mindrecord_file, repeat_num=epoch_size, + dataset = create_yolo_dataset(mindrecord_file, repeat_num=1, batch_size=batch_size, device_num=device_num, rank=rank) dataset_size = dataset.get_dataset_size() print("Create dataset done!") @@ -146,12 +146,12 @@ def test_yolov3(): assert loss_value[2] < expect_loss_value[2] epoch_mseconds = np.array(time_monitor_callback.epoch_mseconds_list)[2] - expect_epoch_mseconds = 950 + expect_epoch_mseconds = 2000 print("epoch mseconds: {}".format(epoch_mseconds)) assert epoch_mseconds <= expect_epoch_mseconds per_step_mseconds = np.array(time_monitor_callback.per_step_mseconds_list)[2] - expect_per_step_mseconds = 110 + expect_per_step_mseconds = 220 print("per step mseconds: {}".format(per_step_mseconds)) assert per_step_mseconds <= expect_per_step_mseconds print("yolov3 test case passed.") diff --git a/tests/st/networks/models/bert/test_bert_tdt_lossscale.py b/tests/st/networks/models/bert/test_bert_tdt_lossscale.py index d4c56edbc1..c0b2d3231b 100644 --- a/tests/st/networks/models/bert/test_bert_tdt_lossscale.py +++ b/tests/st/networks/models/bert/test_bert_tdt_lossscale.py @@ -91,6 +91,7 @@ def me_de_train_dataset(sink_mode=False): """test me de train dataset""" # apply repeat operations repeat_count = 1 + sink_size = -1 batch_size = 16 ds = de.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["input_ids", "input_mask", "segment_ids", "next_sentence_labels", "masked_lm_positions", @@ -99,9 +100,9 @@ def me_de_train_dataset(sink_mode=False): new_repeat_count = repeat_count if sink_mode: repeat_count = 30 - sink_steps = 100 + sink_size = 100 ori_dataaet_size = ds.get_dataset_size() - new_size = sink_steps * batch_size + new_size = sink_size * batch_size ds.set_dataset_size(new_size) new_repeat_count = int(repeat_count * ori_dataaet_size // ds.get_dataset_size()) ds = ds.map(input_columns="masked_lm_ids", operations=type_cast_op) @@ -112,10 +113,9 @@ def me_de_train_dataset(sink_mode=False): ds = ds.map(input_columns="input_ids", operations=type_cast_op) # apply batch operations ds = ds.batch(batch_size, drop_remainder=True) - ds = ds.repeat(repeat_count) logger.info("data size: {}".format(ds.get_dataset_size())) logger.info("repeat_count: {}".format(ds.get_repeat_count())) - return ds, new_repeat_count + return ds, new_repeat_count, sink_size def weight_variable(shape): @@ -157,7 +157,7 @@ class TimeMonitor(Callback): def test_bert_percision(): """test bert percision""" context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", reserve_class_name_in_scope=False) - ds, new_repeat_count = me_de_train_dataset() + ds, new_repeat_count, _ = me_de_train_dataset() version = os.getenv('VERSION', 'large') batch_size = 16 config = get_config(version=version, batch_size=batch_size) @@ -215,7 +215,7 @@ def test_bert_percision(): def test_bert_performance(): """test bert performance""" context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", reserve_class_name_in_scope=False) - ds, new_repeat_count = me_de_train_dataset(sink_mode=True) + ds, new_repeat_count, sink_size = me_de_train_dataset(sink_mode=True) version = os.getenv('VERSION', 'large') batch_size = 16 config = get_config(version=version, batch_size=batch_size) @@ -251,7 +251,7 @@ def test_bert_performance(): param.default_input = weight_variable(value.asnumpy().shape) time_monitor_callback = TimeMonitor(ds.get_dataset_size()) model.train(new_repeat_count, ds, callbacks=[time_monitor_callback, callback], - dataset_sink_mode=True) + 
dataset_sink_mode=True, sink_size=sink_size) # assertion occurs while the loss value, overflow state or loss_scale value is wrong loss_value = np.array(callback.loss_list) diff --git a/tests/st/networks/models/deeplabv3/test_deeplabv3.py b/tests/st/networks/models/deeplabv3/test_deeplabv3.py index d033a991e9..e458897ae6 100644 --- a/tests/st/networks/models/deeplabv3/test_deeplabv3.py +++ b/tests/st/networks/models/deeplabv3/test_deeplabv3.py @@ -79,7 +79,7 @@ def test_deeplabv3_1p(): args_opt.base_size = config.crop_size args_opt.crop_size = config.crop_size args_opt.batch_size = config.batch_size - train_dataset = create_dataset(args_opt, data_url, epoch_size, config.batch_size, + train_dataset = create_dataset(args_opt, data_url, 1, config.batch_size, usage="eval") dataset_size = train_dataset.get_dataset_size() callback = LossCallBack(dataset_size) diff --git a/tests/st/networks/models/resnet50/test_resnet50_imagenet.py b/tests/st/networks/models/resnet50/test_resnet50_imagenet.py index e721b62c58..517c7da5e7 100644 --- a/tests/st/networks/models/resnet50/test_resnet50_imagenet.py +++ b/tests/st/networks/models/resnet50/test_resnet50_imagenet.py @@ -155,7 +155,7 @@ def train_process(q, device_id, epoch_size, device_num, enable_hccl): # train dataset dataset = create_dataset(dataset_path=dataset_path, do_train=True, - repeat_num=epoch_size, batch_size=config.batch_size) + repeat_num=1, batch_size=config.batch_size) step_size = dataset.get_dataset_size() eval_interval = config.eval_interval @@ -163,7 +163,7 @@ def train_process(q, device_id, epoch_size, device_num, enable_hccl): # evalutation dataset eval_dataset = create_dataset(dataset_path=eval_path, do_train=False, - repeat_num=epoch_size, batch_size=config.eval_batch_size) + repeat_num=1, batch_size=config.eval_batch_size) # loss scale loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) @@ -260,14 +260,14 @@ def train_process_thor(q, device_id, epoch_size, device_num, enable_hccl): # train dataset dataset = create_dataset(dataset_path=dataset_path, do_train=True, - repeat_num=epoch_size, batch_size=thor_config.batch_size) + repeat_num=1, batch_size=thor_config.batch_size) step_size = dataset.get_dataset_size() eval_interval = thor_config.eval_interval # evalutation dataset eval_dataset = create_dataset(dataset_path=eval_path, do_train=False, - repeat_num=epoch_size, batch_size=thor_config.eval_batch_size) + repeat_num=1, batch_size=thor_config.eval_batch_size) # loss scale loss_scale = FixedLossScaleManager(thor_config.loss_scale, drop_overflow_update=False) diff --git a/tests/st/tbe_networks/resnet_cifar.py b/tests/st/tbe_networks/resnet_cifar.py index cf9eb59400..c6b1ee0a78 100644 --- a/tests/st/tbe_networks/resnet_cifar.py +++ b/tests/st/tbe_networks/resnet_cifar.py @@ -136,7 +136,7 @@ if __name__ == '__main__': model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'}) if args_opt.do_train: - dataset = create_dataset(epoch_size) + dataset = create_dataset(1) batch_num = dataset.get_dataset_size() config_ck = CheckpointConfig(save_checkpoint_steps=batch_num * 5, keep_checkpoint_max=10) ckpoint_cb = ModelCheckpoint(prefix="train_resnet_cifar10", directory="./", config=config_ck) diff --git a/tests/st/tbe_networks/test_resnet_cifar_1p.py b/tests/st/tbe_networks/test_resnet_cifar_1p.py index 672d17c72b..8ef48b8774 100644 --- a/tests/st/tbe_networks/test_resnet_cifar_1p.py +++ b/tests/st/tbe_networks/test_resnet_cifar_1p.py @@ -140,7 +140,7 @@ def train_process(epoch_size, num_classes, batch_size): 
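In the sink-mode branch of the BERT loss-scale test above, the pipeline-level repeat is gone and the test instead returns a sink_size that is handed to model.train(..., dataset_sink_mode=True, sink_size=sink_size). The new_repeat_count arithmetic there simply rescales the requested iterations to the overridden dataset size. A worked example of that arithmetic (ori_dataset_size is hypothetical; the test reads it from ds.get_dataset_size() before overriding it):

    repeat_count = 30          # sink iterations requested by the test
    sink_size = 100            # steps sunk per iteration
    batch_size = 16
    ori_dataset_size = 3200    # hypothetical raw row count, read before batching

    new_size = sink_size * batch_size            # rows counted as one sunk pass: 1600
    # the test then calls ds.set_dataset_size(new_size)
    new_repeat_count = int(repeat_count * ori_dataset_size // new_size)
    print(new_size, new_repeat_count)            # 1600 60

With these numbers, model.train(new_repeat_count, ds, ..., sink_size=sink_size) runs 60 sunk passes of 100 steps each, i.e. 60 * 1600 = 30 * 3200 raw rows in total, matching the original request.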
model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'}) - dataset = create_dataset(epoch_size, training=True, batch_size=batch_size) + dataset = create_dataset(1, training=True, batch_size=batch_size) loss_cb = LossGet() model.train(epoch_size, dataset, callbacks=[loss_cb]) diff --git a/tests/st/tbe_networks/test_resnet_cifar_8p.py b/tests/st/tbe_networks/test_resnet_cifar_8p.py index a13f367b9f..56d6a91d64 100644 --- a/tests/st/tbe_networks/test_resnet_cifar_8p.py +++ b/tests/st/tbe_networks/test_resnet_cifar_8p.py @@ -164,7 +164,7 @@ def train_process(q, device_id, epoch_size, num_classes, device_num, batch_size, model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'}) - dataset = create_dataset(epoch_size, training=True, + dataset = create_dataset(1, training=True, batch_size=batch_size, rank_id=device_id, rank_size=device_num, enable_hccl=enable_hccl) diff --git a/tests/ut/cpp/dataset/CMakeLists.txt b/tests/ut/cpp/dataset/CMakeLists.txt index 084bd05ab4..d3aefb574b 100644 --- a/tests/ut/cpp/dataset/CMakeLists.txt +++ b/tests/ut/cpp/dataset/CMakeLists.txt @@ -91,8 +91,9 @@ SET(DE_UT_SRCS cyclic_array_test.cc perf_data_test.cc c_api_test.cc - tensor_op_fusion_pass_test.cc + tensor_op_fusion_pass_test.cc sliding_window_op_test.cc + epoch_ctrl_op_test.cc ) add_executable(de_ut_tests ${DE_UT_SRCS}) diff --git a/tests/ut/cpp/dataset/cache_op_test.cc b/tests/ut/cpp/dataset/cache_op_test.cc index bdb7c861b2..1d07a6e0c9 100644 --- a/tests/ut/cpp/dataset/cache_op_test.cc +++ b/tests/ut/cpp/dataset/cache_op_test.cc @@ -397,23 +397,21 @@ TEST_F(MindDataTestCacheOp, TestImageFolderCacheMerge) { std::shared_ptr myClient = std::make_shared(1, 0, true); - std::shared_ptr myMergeOp; - rc = CacheMergeOp::Builder().SetNumWorkers(3).SetOpConnectorSize(3).SetNumCleaner(2).SetClient(myClient).Build( - &myMergeOp); - EXPECT_TRUE(rc.IsOk()); + // In a mappable dataset, it uses a complex interactions of cache lookup op and cache merge op. + // Rather than manually build this, the way to do it is to choose the position of the cache in the tree by + // adding a CacheOp. Then, the tree prepare code will drive a transform that will remove the CacheOp and + // replace it with the required tree structures for cache lookup op and cache merge op. 
- std::shared_ptr myLookupOp; - rc = CacheLookupOp::Builder() - .SetNumWorkers(3) - .SetOpConnectorSize(3) + std::shared_ptr myCacheOp; + rc = CacheOp::Builder() + .SetNumWorkers(4) .SetClient(myClient) - .SetSampler(seq_sampler) - .Build(&myLookupOp); - EXPECT_TRUE(rc.IsOk()); + .SetRowsPerBuffer(3) + .Build(&myCacheOp); std::shared_ptr so; ImageFolderOp::Builder builder; - builder.SetSampler(myLookupOp) + builder.SetSampler(std::move(seq_sampler)) .SetOpConnectorSize(3) .SetNumWorkers(3) .SetRowsPerBuffer(2) @@ -432,20 +430,18 @@ TEST_F(MindDataTestCacheOp, TestImageFolderCacheMerge) { auto myTree = std::make_shared(); rc = myTree->AssociateNode(so); EXPECT_TRUE(rc.IsOk()); - rc = myTree->AssociateNode(myLookupOp); - EXPECT_TRUE(rc.IsOk()); - rc = myTree->AssociateNode(myMergeOp); + + rc = myTree->AssociateNode(myCacheOp); EXPECT_TRUE(rc.IsOk()); + rc = myTree->AssociateNode(myRepeatOp); EXPECT_TRUE(rc.IsOk()); rc = myTree->AssignRoot(myRepeatOp); EXPECT_TRUE(rc.IsOk()); - rc = myRepeatOp->AddChild(myMergeOp); + rc = myRepeatOp->AddChild(myCacheOp); EXPECT_TRUE(rc.IsOk()); - rc = myMergeOp->AddChild(myLookupOp); - EXPECT_TRUE(rc.IsOk()); - rc = myMergeOp->AddChild(so); + rc = myCacheOp->AddChild(so); EXPECT_TRUE(rc.IsOk()); rc = myTree->Prepare(); diff --git a/tests/ut/cpp/dataset/epoch_ctrl_op_test.cc b/tests/ut/cpp/dataset/epoch_ctrl_op_test.cc new file mode 100644 index 0000000000..2fc5f3c047 --- /dev/null +++ b/tests/ut/cpp/dataset/epoch_ctrl_op_test.cc @@ -0,0 +1,639 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include "minddata/dataset/core/client.h" +#include "minddata/dataset/engine/datasetops/source/image_folder_op.h" +#include "common/common.h" +#include "gtest/gtest.h" +#include "utils/log_adapter.h" +#include + +using namespace mindspore::dataset; +using mindspore::MsLogLevel::INFO; +using mindspore::ExceptionType::NoExceptionType; +using mindspore::LogStream; + +std::shared_ptr ImageFolder(int64_t num_works, int64_t rows, int64_t conns, std::string path, + bool shuf = false, std::shared_ptr sampler = nullptr, + std::map map = {}, bool decode = false); + +std::shared_ptr Build(std::vector> ops); + +class MindDataTestEpochCtrlOp : public UT::DatasetOpTesting { +public: + void SetUp() override { + DatasetOpTesting::SetUp(); + folder_path = datasets_root_path_ + "/testPK/data"; + + GlobalInit(); + + // Start with an empty execution tree + my_tree_ = std::make_shared(); + + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false)}); + rc = my_tree_->Prepare(); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + // Start the loop of reading tensors from our pipeline + DatasetIterator di(my_tree_); + TensorMap tensor_map; + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + int32_t i = 0; + while (tensor_map.size() != 0) { + tensor_map["label"]->GetItemAt(&label, {}); + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + golden_imgs.append((char *) tensor_map["image"]->GetBuffer(), (int64_t) tensor_map["image"]->Size()); + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + i++; + } + } + + std::shared_ptr my_tree_; + Status rc; + std::string golden_imgs; + std::string folder_path; + int32_t label = 0; + std::string result; + int32_t img_class[4] = {0, 1, 2, 3}; + +}; + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_AutoInjectEpoch) { + MS_LOG(WARNING) << "Doing ImageFolder_AutoInjectEpoch."; + + int32_t num_epoch = 2 + std::rand() % 5; + + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false)}); + rc = my_tree_->Prepare(); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + MS_LOG(DEBUG) << "num_epoch: " << num_epoch; + std::string golden = golden_imgs; + + // Start the loop of reading tensors from our pipeline + DatasetIterator di(my_tree_); + TensorMap tensor_map; + uint64_t i = 0; + for (int epoch = 0; epoch < num_epoch; epoch++) { + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + while (tensor_map.size() != 0) { + tensor_map["label"]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_map["image"]->GetBuffer(), (int64_t) tensor_map["image"]->Size()); + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + i++; + } + EXPECT_TRUE(result == golden); + result.clear(); + + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". Sample count: " << i; + } + + EXPECT_TRUE(i == 44 * num_epoch); + + // Try to fetch data beyond the specified number of epochs. 
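ImageFolder_AutoInjectEpoch above exercises the default path: Prepare() is called without an epoch count, so the injected epoch control keeps the pipeline alive and a fetch after the last loop still returns Ok. The Python-side analogue, sketched with a toy generator (it mirrors the generator tests added later in this patch rather than copying them):

    import numpy as np
    import mindspore.dataset as ds

    def gen():
        for i in range(4):
            yield (np.array([i]),)

    data = ds.GeneratorDataset(gen, ["data"])
    it = data.create_dict_iterator()   # no num_epochs: the pipeline never shuts down on its own
    for _ in range(3):
        for _ in it:
            pass                       # each pass yields one full epoch
    # The iterator is still alive here; stop it explicitly or leave it to the
    # garbage collector, as the comments in test_epoch_ctrl.py describe.
    it.stop()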
+ rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); +} + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_Epoch) { + MS_LOG(WARNING) << "Doing ImageFolder_Epoch."; + + int32_t num_epoch = 2 + std::rand() % 5; + + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false)}); + rc = my_tree_->Prepare(num_epoch); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + MS_LOG(DEBUG) << "num_epoch: " << num_epoch; + std::string golden = golden_imgs; + + // Start the loop of reading tensors from our pipeline + DatasetIterator di(my_tree_); + TensorMap tensor_map; + uint64_t i = 0; + for (int epoch = 0; epoch < num_epoch; epoch++) { + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + while (tensor_map.size() != 0) { + tensor_map["label"]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_map["image"]->GetBuffer(), (int64_t) tensor_map["image"]->Size()); + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + i++; + } + EXPECT_TRUE(result == golden); + result.clear(); + + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". Sample count: " << i; + } + + EXPECT_TRUE(i == 44 * num_epoch); + + // Try to fetch data beyond the specified number of epochs. + rc = di.GetNextAsMap(&tensor_map); + EXPECT_FALSE(rc.IsOk()); +} + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_Repeat_Epoch) { + MS_LOG(WARNING) << "Doing ImageFolder_Repeat_Epoch."; + + int32_t num_epoch = 2 + std::rand() % 5; + + int32_t num_repeats = 2; + std::shared_ptr repeat_op; + rc = RepeatOp::Builder(num_repeats).Build(&repeat_op); + EXPECT_TRUE(rc.IsOk()); + + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false), repeat_op}); + rc = my_tree_->Prepare(num_epoch); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + MS_LOG(DEBUG) << "num_epoch: " << num_epoch << ". num_repeat: " << num_repeats; + std::string golden = golden_imgs; + for (int i = 1; i < num_repeats; i++) { + golden += golden_imgs; + } + + // Start the loop of reading tensors from our pipeline + DatasetIterator di(my_tree_); + TensorMap tensor_map; + uint64_t i = 0; + for (int epoch = 0; epoch < num_epoch; epoch++) { + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + while (tensor_map.size() != 0) { + tensor_map["label"]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_map["image"]->GetBuffer(), (int64_t) tensor_map["image"]->Size()); + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + i++; + } + EXPECT_TRUE(result == golden); + result.clear(); + + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". Sample count: " << i; + } + + EXPECT_TRUE(i == 44 * num_repeats * num_epoch); + + // Try to fetch data beyond the specified number of epochs. 
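The Repeat_Epoch tests above pin down how repeat composes with epoch control: a RepeatOp with count k multiplies the rows seen in every epoch, and the epoch count multiplies that total again (44 * num_repeats * num_epoch in the assertions). The same composition through the Python API, with arbitrary toy sizes rather than the tests' values:

    import numpy as np
    import mindspore.dataset as ds

    def gen():
        for i in range(5):
            yield (np.array([i]),)

    num_repeats = 2
    num_epochs = 3

    data = ds.GeneratorDataset(gen, ["data"]).repeat(num_repeats)
    it = data.create_dict_iterator(num_epochs=num_epochs)

    total = 0
    for _ in range(num_epochs):
        rows = sum(1 for _ in it)          # one epoch = 5 * num_repeats rows
        assert rows == 5 * num_repeats
        total += rows
    assert total == 5 * num_repeats * num_epochs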
+ rc = di.GetNextAsMap(&tensor_map); + EXPECT_FALSE(rc.IsOk()); +} + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_Repeat_Repeat_Epoch) { + MS_LOG(WARNING) << "Doing ImageFolder_Repeat_Repeat_Epoch."; + + int32_t num_epoch = 2 + std::rand() % 5; + + int32_t num_repeats = 2; + std::shared_ptr repeat_op; + rc = RepeatOp::Builder(num_repeats).Build(&repeat_op); + EXPECT_TRUE(rc.IsOk()); + + int32_t num_repeats_2 = 3; + std::shared_ptr repeat_op_2; + rc = RepeatOp::Builder(num_repeats_2).Build(&repeat_op_2); + EXPECT_TRUE(rc.IsOk()); + + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false), repeat_op, repeat_op_2}); + rc = my_tree_->Prepare(num_epoch); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + MS_LOG(DEBUG) << "num_epoch: " << num_epoch << ". num_repeat: " << num_repeats << ". num_repeat_2: " << num_repeats_2; + std::string golden; + for (int j = 0; j < num_repeats_2; j++) { + for (int i = 0; i < num_repeats; i++) { + golden += golden_imgs; + } + } + + // Start the loop of reading tensors from our pipeline + DatasetIterator di(my_tree_); + TensorMap tensor_map; + uint64_t i = 0; + for (int epoch = 0; epoch < num_epoch; epoch++) { + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + while (tensor_map.size() != 0) { + tensor_map["label"]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_map["image"]->GetBuffer(), (int64_t) tensor_map["image"]->Size()); + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + i++; + } + EXPECT_EQ(result.size(), golden.size()); + EXPECT_TRUE(result == golden); + result.clear(); + + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". Sample count: " << i; + } + + EXPECT_EQ(i, 44 * num_epoch * num_repeats * num_repeats_2); + + // Try to fetch data beyond the specified number of epochs. + rc = di.GetNextAsMap(&tensor_map); + EXPECT_FALSE(rc.IsOk()); +} + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_Epoch_Inf) { + MS_LOG(WARNING) << "Doing ImageFolder_Epoch_Inf."; + + // if num_epoch == -1, it means infinity. + int32_t num_epoch = -1; + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false)}); + rc = my_tree_->Prepare(num_epoch); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + // Start the loop of reading tensors from our pipeline + DatasetIterator di(my_tree_); + TensorMap tensor_map; + uint64_t i = 0; + + // For this test, we stop at stop_at_epoch number. + int32_t stop_at_epoch = 2 + std::rand() % 6; + MS_LOG(DEBUG) << "num_epoch: " << num_epoch << ". Stop at epoch: " << stop_at_epoch; + for (int epoch = 0; epoch < stop_at_epoch; epoch++) { + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + while (tensor_map.size() != 0) { + tensor_map["label"]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_map["image"]->GetBuffer(), (int64_t) tensor_map["image"]->Size()); + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + i++; + } + EXPECT_EQ(result, golden_imgs); + result.clear(); + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". 
Sample count: " << i; + } + EXPECT_TRUE(i == 44 * stop_at_epoch); +} + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_Repeat_Repeat_Epoch_Inf) { + MS_LOG(WARNING) << "Doing ImageFolder_Repeat_Epoch_Inf."; + + // if num_epoch == -1, it means infinity. + int32_t num_epoch = -1; + + int32_t num_repeats = 2; + std::shared_ptr repeat_op; + rc = RepeatOp::Builder(num_repeats).Build(&repeat_op); + EXPECT_TRUE(rc.IsOk()); + + int32_t num_repeats_2 = 3; + std::shared_ptr repeat_op_2; + rc = RepeatOp::Builder(num_repeats_2).Build(&repeat_op_2); + EXPECT_TRUE(rc.IsOk()); + + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false), repeat_op, repeat_op_2}); + rc = my_tree_->Prepare(num_epoch); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + MS_LOG(DEBUG) << "num_epoch: " << num_epoch << ". num_repeat: " << num_repeats << ". num_repeat_2: " << num_repeats_2; + std::string golden; + for (int j = 0; j < num_repeats_2; j++) { + for (int i = 0; i < num_repeats; i++) { + golden += golden_imgs; + } + } + + // Start the loop of reading tensors from our pipeline + DatasetIterator di(my_tree_); + TensorMap tensor_map; + uint64_t i = 0; + + // For this test, we stop at stop_at_epoch number. + int32_t stop_at_epoch = 2 + std::rand() % 6; + MS_LOG(DEBUG) << "num_epoch: " << num_epoch << ". Stop at epoch: " << stop_at_epoch; + for (int epoch = 0; epoch < stop_at_epoch; epoch++) { + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + while (tensor_map.size() != 0) { + tensor_map["label"]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_map["image"]->GetBuffer(), (int64_t) tensor_map["image"]->Size()); + rc = di.GetNextAsMap(&tensor_map); + EXPECT_TRUE(rc.IsOk()); + i++; + } + EXPECT_EQ(result, golden); + result.clear(); + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". Sample count: " << i; + } + EXPECT_TRUE(i == 44 * stop_at_epoch * num_repeats * num_repeats_2); +} + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_Epoch_ChildItr) { + MS_LOG(WARNING) << "Doing ImageFolder_Epoch_ChildItr."; + + int32_t num_epoch = 2 + std::rand() % 5; + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false)}); + rc = my_tree_->Prepare(num_epoch); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + MS_LOG(INFO) << "num_epoch: " << num_epoch; + + // Start the loop of reading tensors from our pipeline + ChildIterator ci(my_tree_->root().get(), 0, 0); + TensorRow tensor_row; + uint64_t total_sample = 0; + uint64_t i = 0; + uint32_t epoch = 0; + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + while(!ci.eof_handled()) { + i = 0; + while (tensor_row.size() != 0) { + tensor_row[1]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_row[0]->GetBuffer(), (int64_t) tensor_row[0]->Size()); + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + i++; + } + + epoch++; + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". 
Sample count: " << i; + EXPECT_TRUE(result == golden_imgs); + result.clear(); + EXPECT_TRUE(i == 44); + total_sample += i; + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + } + EXPECT_TRUE(total_sample == 44 * num_epoch); + + // Try to fetch data after last epoch ends. + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(tensor_row.empty()); + EXPECT_FALSE(rc.IsOk()); +} + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_Repeat_Epoch_ChildItr) { + MS_LOG(WARNING) << "Doing ImageFolder_Repeat_Epoch_ChildItr."; + + int32_t num_epoch = 2 + std::rand() % 5; + + int32_t num_repeats = 2; + std::shared_ptr repeat_op; + rc = RepeatOp::Builder(num_repeats).Build(&repeat_op); + EXPECT_TRUE(rc.IsOk()); + + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false), repeat_op}); + rc = my_tree_->Prepare(num_epoch); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + MS_LOG(DEBUG) << "num_epoch: " << num_epoch << ". num_repeat: " << num_repeats; + std::string golden; + for (int i = 0; i < num_repeats; i++) { + golden += golden_imgs; + } + + // Start the loop of reading tensors from our pipeline + ChildIterator ci(my_tree_->root().get(), 0, 0); + TensorRow tensor_row; + uint64_t total_sample = 0; + uint64_t i = 0; + uint32_t epoch = 0; + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + while(!ci.eof_handled()) { + i = 0; + while (tensor_row.size() != 0) { + tensor_row[1]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_row[0]->GetBuffer(), (int64_t) tensor_row[0]->Size()); + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + i++; + } + + epoch++; + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". Sample count: " << i; + EXPECT_TRUE(result == golden); + result.clear(); + EXPECT_TRUE(i == 44 * num_repeats); + total_sample += i; + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + } + EXPECT_TRUE(total_sample == 44 * num_epoch * num_repeats); + + // Try to fetch data after last epoch ends. + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(tensor_row.empty()); + EXPECT_FALSE(rc.IsOk()); +} + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_Repeat_Repeat_Epoch_ChildItr) { + MS_LOG(WARNING) << "Doing ImageFolder_Repeat_Repeat_Epoch_ChildItr."; + + int32_t num_epoch = 2 + std::rand() % 5; + + int32_t num_repeats = 2; + std::shared_ptr repeat_op; + rc = RepeatOp::Builder(num_repeats).Build(&repeat_op); + EXPECT_TRUE(rc.IsOk()); + + int32_t num_repeats_2 = 3; + std::shared_ptr repeat_op_2; + rc = RepeatOp::Builder(num_repeats_2).Build(&repeat_op_2); + EXPECT_TRUE(rc.IsOk()); + + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false), repeat_op, repeat_op_2}); + rc = my_tree_->Prepare(num_epoch); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + MS_LOG(DEBUG) << "num_epoch: " << num_epoch << ". num_repeat: " << num_repeats << ". 
num_repeat_2: " << num_repeats_2; + std::string golden; + for (int j = 0; j < num_repeats_2; j++) { + for (int i = 0; i < num_repeats; i++) { + golden += golden_imgs; + } + } + + // Start the loop of reading tensors from our pipeline + ChildIterator ci(my_tree_->root().get(), 0, 0); + TensorRow tensor_row; + uint64_t total_sample = 0; + uint64_t i = 0; + uint32_t epoch = 0; + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + while(!ci.eof_handled()) { + i = 0; + while (tensor_row.size() != 0) { + tensor_row[1]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_row[0]->GetBuffer(), (int64_t) tensor_row[0]->Size()); + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + i++; + } + + epoch++; + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". Sample count: " << i; + EXPECT_TRUE(result == golden); + result.clear(); + EXPECT_TRUE(i == 44 * num_repeats * num_repeats_2); + total_sample += i; + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + } + EXPECT_TRUE(total_sample == 44 * num_epoch * num_repeats * num_repeats_2); + + // Try to fetch data after last epoch ends. + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(tensor_row.empty()); + EXPECT_FALSE(rc.IsOk()); +} + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_Epoch_Inf_ChildItr) { + MS_LOG(WARNING) << "Doing ImageFolder_Epoch_Inf_ChildItr."; + + // if num_epoch == -1, it means infinity. + int32_t num_epoch = -1; + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false)}); + rc = my_tree_->Prepare(num_epoch); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + // Start the loop of reading tensors from our pipeline + ChildIterator ci(my_tree_->root().get(), 0, 0); + TensorRow tensor_row; + uint64_t i = 0; + + // For this test, we stop at a random number between 0 - 100 epochs. + int32_t stop_at_epoch = 2 + std::rand() % 5; + MS_LOG(DEBUG) << "num_epoch: " << num_epoch << ". Stop at epoch: " << stop_at_epoch; + for (int epoch = 0; epoch < stop_at_epoch; epoch++) { + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + while (tensor_row.size() != 0) { + tensor_row[1]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_row[0]->GetBuffer(), (int64_t) tensor_row[0]->Size()); + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + i++; + } + EXPECT_TRUE(result == golden_imgs); + result.clear(); + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". Sample count: " << i; + } + EXPECT_TRUE(i == 44 * stop_at_epoch); +} + +TEST_F(MindDataTestEpochCtrlOp, ImageFolder_Repeat_Epoch_Inf_ChildItr) { + MS_LOG(WARNING) << "Doing ImageFolder_Repeat_Epoch_Inf_ChildItr."; + + // if num_epoch == -1, it means infinity. + int32_t num_epoch = -1; + int32_t num_repeats = 2; + std::shared_ptr repeat_op; + rc = RepeatOp::Builder(num_repeats).Build(&repeat_op); + EXPECT_TRUE(rc.IsOk()); + + my_tree_ = Build({ImageFolder(2, 2, 32, folder_path, false), repeat_op}); + rc = my_tree_->Prepare(num_epoch); + EXPECT_TRUE(rc.IsOk()); + rc = my_tree_->Launch(); + EXPECT_TRUE(rc.IsOk()); + + MS_LOG(DEBUG) << "num_epoch: " << num_epoch << ". 
num_repeat: " << num_repeats; + std::string golden; + for (int i = 0; i < num_repeats; i++) { + golden += golden_imgs; + } + + // Start the loop of reading tensors from our pipeline + ChildIterator ci(my_tree_->root().get(), 0, 0); + TensorRow tensor_row; + uint64_t i = 0; + + // For this test, we stop at a random number between 0 - 100 epochs. + int32_t stop_at_epoch = 2 + std::rand() % 5; + MS_LOG(DEBUG) << "num_epoch: " << num_epoch << ". Stop at epoch: " << stop_at_epoch; + for (int epoch = 0; epoch < stop_at_epoch; epoch++) { + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + while (tensor_row.size() != 0) { + tensor_row[1]->GetItemAt(&label, {}); + MS_LOG(DEBUG) << "row:" << i << "\tlabel:" << label << "\n"; + EXPECT_TRUE(img_class[(i % 44) / 11] == label); + // Dump all the image into string, to be used as a comparison later. + result.append((char *) tensor_row[0]->GetBuffer(), (int64_t) tensor_row[0]->Size()); + rc = ci.FetchNextTensorRow(&tensor_row); + EXPECT_TRUE(rc.IsOk()); + i++; + } + EXPECT_TRUE(result == golden); + result.clear(); + MS_LOG(DEBUG) << "Current epoch: " << epoch << ". Sample count: " << i; + } + EXPECT_TRUE(i == 44 * stop_at_epoch * num_repeats); +} diff --git a/tests/ut/cpp/dataset/repeat_op_test.cc b/tests/ut/cpp/dataset/repeat_op_test.cc index 74d494c0dc..c74aee06ab 100644 --- a/tests/ut/cpp/dataset/repeat_op_test.cc +++ b/tests/ut/cpp/dataset/repeat_op_test.cc @@ -46,7 +46,8 @@ TEST_F(MindDataTestrepeat_op, Testrepeat_opFuntions) { ASSERT_TRUE(rc.IsOk()); rc = my_tree->AssociateNode(my_tfreader_op); ASSERT_TRUE(rc.IsOk()); - my_tree->AssociateNode(parent_op); + rc = my_tree->AssociateNode(parent_op); + ASSERT_TRUE(rc.IsOk()); ASSERT_NE(parent_op, nullptr); ASSERT_NE(my_tfreader_op, nullptr); parent_op->AddChild(std::move(my_tfreader_op)); diff --git a/tests/ut/python/dataset/test_cache_map.py b/tests/ut/python/dataset/test_cache_map.py index 0e42b422aa..5de969db6d 100644 --- a/tests/ut/python/dataset/test_cache_map.py +++ b/tests/ut/python/dataset/test_cache_map.py @@ -104,9 +104,11 @@ def test_cache_map_basic3(): decode_op = c_vision.Decode() ds1 = ds1.repeat(4) ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache) + print("ds1.dataset_size is ", ds1.get_dataset_size()) num_iter = 0 for _ in ds1.create_dict_iterator(): + print("get data from dataset") num_iter += 1 logger.info("Number of data in ds1: {} ".format(num_iter)) @@ -152,6 +154,10 @@ def test_cache_map_failure1(): if __name__ == '__main__': test_cache_map_basic1() + print("test_cache_map_basic1 success.") test_cache_map_basic2() + print("test_cache_map_basic2 success.") test_cache_map_basic3() + print("test_cache_map_basic3 success.") test_cache_map_failure1() + print("test_cache_map_failure1 success.") diff --git a/tests/ut/python/dataset/test_datasets_tfrecord.py b/tests/ut/python/dataset/test_datasets_tfrecord.py index f57c387b35..f0a08d34bb 100644 --- a/tests/ut/python/dataset/test_datasets_tfrecord.py +++ b/tests/ut/python/dataset/test_datasets_tfrecord.py @@ -238,7 +238,7 @@ def test_tfrecord_shard_equal_rows(): def test_tfrecord_no_schema_columns_list(): logger.info("test_tfrecord_no_schema_columns_list") data = ds.TFRecordDataset(FILES, shuffle=False, columns_list=["col_sint16"]) - row = data.create_dict_iterator().get_next() + row = data.create_dict_iterator().__next__() assert row["col_sint16"] == [-32768] with pytest.raises(KeyError) as info: @@ -258,7 +258,7 @@ def test_tfrecord_schema_columns_list(): schema.add_column('col_sint32', 
de_type=mstype.int64, shape=[1]) schema.add_column('col_sint64', de_type=mstype.int64, shape=[1]) data = ds.TFRecordDataset(FILES, schema=schema, shuffle=False, columns_list=["col_sint16"]) - row = data.create_dict_iterator().get_next() + row = data.create_dict_iterator().__next__() assert row["col_sint16"] == [-32768] with pytest.raises(KeyError) as info: diff --git a/tests/ut/python/dataset/test_deviceop_cpu.py b/tests/ut/python/dataset/test_deviceop_cpu.py index 1c701c3e40..b5f18665e0 100644 --- a/tests/ut/python/dataset/test_deviceop_cpu.py +++ b/tests/ut/python/dataset/test_deviceop_cpu.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== +import time + import mindspore.dataset as ds import mindspore.dataset.transforms.vision.c_transforms as vision from mindspore import log as logger @@ -35,6 +37,8 @@ def test_case_0(): data = data.device_que() data.send() + time.sleep(0.1) + data.stop_send() def test_case_1(): @@ -58,6 +62,8 @@ def test_case_1(): data = data.device_que() data.send() + time.sleep(0.1) + data.stop_send() def test_case_2(): @@ -84,6 +90,8 @@ def test_case_2(): data = data.device_que() assert data.get_repeat_count() == 2 data.send() + time.sleep(0.1) + data.stop_send() def test_case_3(): @@ -109,13 +117,17 @@ def test_case_3(): data = data.device_que() data.send() + time.sleep(0.1) + data.stop_send() def test_case_tf_file(): data = ds.TFRecordDataset(TF_FILES, TF_SCHEMA_FILE, shuffle=ds.Shuffle.FILES) - data = data.to_device(num_batch=10) + data = data.to_device() data.send() + time.sleep(0.1) + data.stop_send() if __name__ == '__main__': diff --git a/tests/ut/python/dataset/test_epoch_ctrl.py b/tests/ut/python/dataset/test_epoch_ctrl.py new file mode 100644 index 0000000000..422eaefac9 --- /dev/null +++ b/tests/ut/python/dataset/test_epoch_ctrl.py @@ -0,0 +1,608 @@ +# Copyright 2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +""" +Testing Epoch Control op in DE +""" +import itertools +import cv2 +import numpy as np +import pytest + +import mindspore.dataset as ds +import mindspore.dataset.transforms.vision.c_transforms as vision +from mindspore import log as logger + +DATA_DIR = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"] +SCHEMA_DIR = "../data/dataset/test_tf_file_3_images/datasetSchema.json" + + +def diff_mse(in1, in2): + """ + diff_mse + """ + mse = (np.square(in1.astype(float) / 255 - in2.astype(float) / 255)).mean() + return mse * 100 + +def test_cifar10(): + """ + dataset parameter + """ + logger.info("Test dataset parameter") + data_dir_10 = "../data/dataset/testCifar10Data" + num_repeat = 2 + batch_size = 32 + limit_dataset = 100 + # apply dataset operations + data1 = ds.Cifar10Dataset(data_dir_10, limit_dataset) + data1 = data1.repeat(num_repeat) + data1 = data1.batch(batch_size, True) + num_epoch = 5 + # iter1 will always assume there is a next epoch and never shutdown. + iter1 = data1.create_tuple_iterator() + epoch_count = 0 + sample_count = 0 + for _ in range(num_epoch): + row_count = 0 + for _ in iter1: + # in this example, each dictionary has keys "image" and "label" + row_count += 1 + assert row_count == int(limit_dataset * num_repeat / batch_size) + logger.debug("row_count: ", row_count) + epoch_count += 1 + sample_count += row_count + assert epoch_count == num_epoch + logger.debug("total epochs: ", epoch_count) + assert sample_count == int(limit_dataset * num_repeat / batch_size) * num_epoch + logger.debug("total sample: ", sample_count) + + +def test_decode_op(): + """ + Test Decode op + """ + logger.info("test_decode_op") + + # Decode with rgb format set to True + data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False) + + # Serialize and Load dataset requires using vision.Decode instead of vision.Decode(). + data1 = data1.map(input_columns=["image"], operations=[vision.Decode(True)]) + + # Second dataset + data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False) + + num_epoch = 5 + # iter1 will always assume there is a next epoch and never shutdown. + iter1 = data1.create_dict_iterator() + # iter 2 will stop and shutdown pipeline after num_epoch + iter2 = data2.create_dict_iterator(num_epoch) + for _ in range(num_epoch): + i = 0 + for item1, item2 in itertools.zip_longest(iter1, iter2): + actual = item1["image"] + expected = cv2.imdecode(item2["image"], cv2.IMREAD_COLOR) + expected = cv2.cvtColor(expected, cv2.COLOR_BGR2RGB) + assert actual.shape == expected.shape + diff = actual - expected + mse = np.sum(np.power(diff, 2)) + assert mse == 0 + i = i + 1 + assert i == 3 + + # Users have the option to manually stop the iterator, or rely on garbage collector. + iter1.stop() + # Expect a AttributeError since iter1 has been stopped. + with pytest.raises(AttributeError) as info: + iter1.__next__() + assert "object has no attribute 'depipeline'" in str(info.value) + + with pytest.raises(RuntimeError) as info: + iter2.__next__() + err_msg = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs." 
+ assert err_msg in str(info.value) + + +# Generate 1d int numpy array from 0 - 63 +def generator_1d(): + """ + generator + """ + for i in range(64): + yield (np.array([i]),) + + +def test_generator_dict_0(): + """ + test generator dict 0 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + + i = 0 + # create the iterator inside the loop declaration + for item in data1.create_dict_iterator(): # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item["data"], golden) + i = i + 1 + +def test_generator_dict_1(): + """ + test generator dict 1 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + + for _ in range(10): + i = 0 + # BAD. Do not create iterator every time inside. + # Create iterator outside the epoch for loop. + for item in data1.create_dict_iterator(): # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item["data"], golden) + i = i + 1 + assert i == 64 + +def test_generator_dict_2(): + """ + test generator dict 2 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + iter1 = data1.create_dict_iterator() + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item["data"], golden) + i = i + 1 + assert i == 64 + + # iter1 is still alive and running. + item1 = iter1.__next__() + assert item1 + # rely on garbage collector to destroy iter1 + +def test_generator_dict_3(): + """ + test generator dict 3 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + iter1 = data1.create_dict_iterator() + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item["data"], golden) + i = i + 1 + assert i == 64 + # optional + iter1.stop() + # Expect a AttributeError since iter1 has been stopped. + with pytest.raises(AttributeError) as info: + iter1.__next__() + assert "object has no attribute 'depipeline'" in str(info.value) + + +def test_generator_dict_4(): + """ + test generator dict 4 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + iter1 = data1.create_dict_iterator(num_epochs=10) + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item["data"], golden) + i = i + 1 + assert i == 64 + + with pytest.raises(RuntimeError) as info: + iter1.__next__() + err_msg = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs." + assert err_msg in str(info.value) + +def test_generator_dict_4_1(): + """ + test generator dict 4_1 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + # epoch ctrl op will not be injected if num_epochs is 1. + iter1 = data1.create_dict_iterator(num_epochs=1) + for _ in range(1): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item["data"], golden) + i = i + 1 + assert i == 64 + + with pytest.raises(RuntimeError) as info: + iter1.__next__() + err_msg = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs." 
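+    # With num_epochs=1 no epoch ctrl op is injected, so the pipeline simply shuts
+    # down after its single epoch; the extra __next__() above therefore hits the
+    # same EOF error message verified below.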
+ assert err_msg in str(info.value) + +def test_generator_dict_4_2(): + """ + test generator dict 4_2 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + # repeat will not be injected when num repeat is 1. + data1 = data1.repeat(1) + # epoch ctrl op will not be injected if num_epochs is 1. + iter1 = data1.create_dict_iterator(num_epochs=1) + for _ in range(1): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item["data"], golden) + i = i + 1 + assert i == 64 + + with pytest.raises(RuntimeError) as info: + iter1.__next__() + err_msg = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs." + assert err_msg in str(info.value) + +def test_generator_dict_5(): + """ + test generator dict 5 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + iter1 = data1.create_dict_iterator(num_epochs=11) + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item["data"], golden) + i = i + 1 + assert i == 64 + + # still one more epoch left in the iter1. + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item["data"], golden) + i = i + 1 + assert i == 64 + + # now iter1 has been exhausted, c++ pipeline has been shut down. + with pytest.raises(RuntimeError) as info: + iter1.__next__() + err_msg = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs." + assert err_msg in str(info.value) + +# Test tuple iterator + +def test_generator_tuple_0(): + """ + test generator tuple 0 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + + i = 0 + # create the iterator inside the loop declaration + for item in data1.create_tuple_iterator(): # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item[0], golden) + i = i + 1 + +def test_generator_tuple_1(): + """ + test generator tuple 1 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + + for _ in range(10): + i = 0 + # BAD. Do not create iterator every time inside. + # Create iterator outside the epoch for loop. + for item in data1.create_tuple_iterator(): # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 + +def test_generator_tuple_2(): + """ + test generator tuple 2 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + iter1 = data1.create_tuple_iterator() + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 + + # iter1 is still alive and running. 
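+    # Because no num_epochs was given, the iterator assumes another epoch is always
+    # coming and keeps the C++ pipeline alive, so this extra fetch returns a row
+    # from the next epoch instead of raising.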
+ item1 = iter1.__next__() + assert item1 + # rely on garbage collector to destroy iter1 + +def test_generator_tuple_3(): + """ + test generator tuple 3 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + iter1 = data1.create_tuple_iterator() + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 + # optional + iter1.stop() + # Expect a AttributeError since iter1 has been stopped. + with pytest.raises(AttributeError) as info: + iter1.__next__() + assert "object has no attribute 'depipeline'" in str(info.value) + + +def test_generator_tuple_4(): + """ + test generator tuple 4 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + iter1 = data1.create_tuple_iterator(num_epochs=10) + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 + + with pytest.raises(RuntimeError) as info: + iter1.__next__() + err_msg = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs." + assert err_msg in str(info.value) + + +def test_generator_tuple_5(): + """ + test generator tuple 5 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + iter1 = data1.create_tuple_iterator(num_epochs=11) + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 + + # still one more epoch left in the iter1. + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 + + # now iter1 has been exhausted, c++ pipeline has been shut down. + with pytest.raises(RuntimeError) as info: + iter1.__next__() + err_msg = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs." + assert err_msg in str(info.value) + +# Test with repeat +def test_generator_tuple_repeat_1(): + """ + test generator tuple repeat 1 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + data1 = data1.repeat(2) + iter1 = data1.create_tuple_iterator(num_epochs=11) + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i % 64]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 * 2 + + # still one more epoch left in the iter1. + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i % 64]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 * 2 + + # now iter1 has been exhausted, c++ pipeline has been shut down. + with pytest.raises(RuntimeError) as info: + iter1.__next__() + err_msg = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs." 
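+    # repeat(2) doubles the rows seen per epoch (64 * 2 above), while num_epochs=11
+    # still bounds how many epochs may be read; fetching past the last epoch raises
+    # the EOF error asserted below.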
+ assert err_msg in str(info.value) + + +# Test with repeat +def test_generator_tuple_repeat_repeat_1(): + """ + test generator tuple repeat repeat 1 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + data1 = data1.repeat(2) + data1 = data1.repeat(3) + iter1 = data1.create_tuple_iterator(num_epochs=11) + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i % 64]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 * 2 * 3 + + # still one more epoch left in the iter1. + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i % 64]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 * 2 * 3 + + # now iter1 has been exhausted, c++ pipeline has been shut down. + with pytest.raises(RuntimeError) as info: + iter1.__next__() + err_msg = "EOF buffer encountered. Users try to fetch data beyond the specified number of epochs." + assert err_msg in str(info.value) + + +def test_generator_tuple_repeat_repeat_2(): + """ + test generator tuple repeat repeat 2 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + data1 = data1.repeat(2) + data1 = data1.repeat(3) + iter1 = data1.create_tuple_iterator() + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i % 64]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 * 2 * 3 + # optional + iter1.stop() + # Expect a AttributeError since iter1 has been stopped. + with pytest.raises(AttributeError) as info: + iter1.__next__() + assert "object has no attribute 'depipeline'" in str(info.value) + +def test_generator_tuple_repeat_repeat_3(): + """ + test generator tuple repeat repeat 3 + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + data1 = data1.repeat(2) + data1 = data1.repeat(3) + iter1 = data1.create_tuple_iterator() + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i % 64]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 * 2 * 3 + + for _ in range(5): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i % 64]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 * 2 * 3 + + # rely on garbage collector to destroy iter1 + +def test_generator_reusedataset(): + """ + test generator reusedataset + """ + logger.info("Test 1D Generator : 0 - 63") + + # apply dataset operations + data1 = ds.GeneratorDataset(generator_1d, ["data"]) + data1 = data1.repeat(2) + iter1 = data1.create_tuple_iterator() + for _ in range(10): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i % 64]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 * 2 + + data1 = data1.repeat(3) + iter1 = data1.create_tuple_iterator() + for _ in range(5): + i = 0 + for item in iter1: # each data is a dictionary + golden = np.array([i % 64]) + assert np.array_equal(item[0], golden) + i = i + 1 + assert i == 64 * 2 * 3 + + data1 = data1.batch(2) + iter1 = data1.create_dict_iterator() + for _ in range(5): + i = 0 + sample = 0 + for item in iter1: # each data is a dictionary + golden = np.array([[i % 64], [(i + 1) % 64]]) + assert np.array_equal(item["data"], golden) + i = i + 2 + sample = sample + 1 + assert sample == 64 * 3 + + # 
rely on garbage collector to destroy iter1 diff --git a/tests/ut/python/dataset/test_five_crop.py b/tests/ut/python/dataset/test_five_crop.py index ef2e376c0f..86f52bdcd7 100644 --- a/tests/ut/python/dataset/test_five_crop.py +++ b/tests/ut/python/dataset/test_five_crop.py @@ -87,7 +87,7 @@ def test_five_crop_error_msg(): data = data.map(input_columns=["image"], operations=transform()) with pytest.raises(RuntimeError) as info: - data.create_tuple_iterator().get_next() + data.create_tuple_iterator().__next__() error_msg = "TypeError: img should be PIL Image or Numpy array. Got " # error msg comes from ToTensor() diff --git a/tests/ut/python/dataset/test_get_size.py b/tests/ut/python/dataset/test_get_size.py index ba4162788c..1dce312a32 100644 --- a/tests/ut/python/dataset/test_get_size.py +++ b/tests/ut/python/dataset/test_get_size.py @@ -41,18 +41,18 @@ def test_case1(): assert data.get_batch_size() == 2 assert data.get_repeat_count() == 1 data = data.repeat(10) - assert data.get_dataset_size() == 6 + assert data.get_dataset_size() == 60 assert data.get_batch_size() == 2 assert data.get_repeat_count() == 10 data = data.project(["new_column"]) - assert data.get_dataset_size() == 6 + assert data.get_dataset_size() == 60 assert data.get_batch_size() == 2 assert data.get_repeat_count() == 10 data2 = ds.TFRecordDataset(FILES, SCHEMA_FILE).batch(2).repeat(10) data1 = data.zip(data2) - assert data1.get_dataset_size() == 6 + assert data1.get_dataset_size() == 60 def test_case2(): @@ -65,14 +65,14 @@ def test_case2(): data = data.rename("col_sint64", "new_column") assert data.get_dataset_size() == 3 data = data.repeat(10) - assert data.get_dataset_size() == 3 + assert data.get_dataset_size() == 30 data = data.project(["new_column"]) - assert data.get_dataset_size() == 3 + assert data.get_dataset_size() == 30 data2 = ds.TFRecordDataset(FILES, num_samples=6).batch(2).repeat(10) data1 = data.zip(data2) - assert data1.get_dataset_size() == 3 + assert data1.get_dataset_size() == 30 def test_case3(): @@ -94,11 +94,11 @@ def test_case4(): data2 = data2.shuffle(100) assert data2.get_dataset_size() == 6 data2 = data2.repeat(3) - assert data2.get_dataset_size() == 6 + assert data2.get_dataset_size() == 18 data3 = ds.zip((data1, data2)) - assert data3.get_dataset_size() == 6 + assert data3.get_dataset_size() == 18 def test_case5(): diff --git a/tests/ut/python/dataset/test_iterator.py b/tests/ut/python/dataset/test_iterator.py index 6413e79612..70da93a0cc 100644 --- a/tests/ut/python/dataset/test_iterator.py +++ b/tests/ut/python/dataset/test_iterator.py @@ -73,7 +73,7 @@ def test_iterator_weak_ref(): _cleanup() with pytest.raises(AttributeError) as info: - itr2.get_next() + itr2.__next__() assert "object has no attribute 'depipeline'" in str(info.value) del itr1 diff --git a/tests/ut/python/dataset/test_repeat.py b/tests/ut/python/dataset/test_repeat.py index ca4702ff8c..f91e4da3ed 100644 --- a/tests/ut/python/dataset/test_repeat.py +++ b/tests/ut/python/dataset/test_repeat.py @@ -251,6 +251,49 @@ def test_nested_repeat11(): assert sum([1 for _ in data]) == 2 * 3 * 4 * 5 * 3 +def test_repeat_count1(): + data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False) + data1_size = data1.get_dataset_size() + logger.info("dataset size is {}".format(data1_size)) + batch_size = 2 + repeat_count = 4 + resize_height, resize_width = 32, 32 + decode_op = vision.Decode() + resize_op = vision.Resize((resize_height, resize_width), interpolation=ds.transforms.vision.Inter.LINEAR) + data1 = 
data1.map(input_columns=["image"], operations=decode_op) + data1 = data1.map(input_columns=["image"], operations=resize_op) + data1 = data1.repeat(repeat_count) + data1 = data1.batch(batch_size, drop_remainder=False) + dataset_size = data1.get_dataset_size() + logger.info("dataset repeat then batch's size is {}".format(dataset_size)) + num1_iter = 0 + for _ in data1.create_dict_iterator(): + num1_iter += 1 + + assert data1_size == 3 + assert dataset_size == num1_iter == 6 + +def test_repeat_count2(): + data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False) + data1_size = data1.get_dataset_size() + logger.info("dataset size is {}".format(data1_size)) + batch_size = 2 + repeat_count = 4 + resize_height, resize_width = 32, 32 + decode_op = vision.Decode() + resize_op = vision.Resize((resize_height, resize_width), interpolation=ds.transforms.vision.Inter.LINEAR) + data1 = data1.map(input_columns=["image"], operations=decode_op) + data1 = data1.map(input_columns=["image"], operations=resize_op) + data1 = data1.batch(batch_size, drop_remainder=False) + data1 = data1.repeat(repeat_count) + dataset_size = data1.get_dataset_size() + logger.info("dataset batch then repeat's size is {}".format(dataset_size)) + num1_iter = 0 + for _ in data1.create_dict_iterator(): + num1_iter += 1 + + assert data1_size == 3 + assert dataset_size == num1_iter == 8 if __name__ == "__main__": test_tf_repeat_01() @@ -268,3 +311,5 @@ if __name__ == "__main__": test_nested_repeat9() test_nested_repeat10() test_nested_repeat11() + test_repeat_count1() + test_repeat_count2() diff --git a/tests/ut/python/dataset/test_zip.py b/tests/ut/python/dataset/test_zip.py index a00a0823d4..ebfab86aff 100644 --- a/tests/ut/python/dataset/test_zip.py +++ b/tests/ut/python/dataset/test_zip.py @@ -252,14 +252,14 @@ def test_zip_exception_06(): if __name__ == '__main__': test_zip_01() - test_zip_02() - test_zip_03() - test_zip_04() - test_zip_05() - test_zip_06() - test_zip_exception_01() - test_zip_exception_02() - test_zip_exception_03() - test_zip_exception_04() - test_zip_exception_05() - test_zip_exception_06() + #test_zip_02() + #test_zip_03() + #test_zip_04() + #test_zip_05() + #test_zip_06() + #test_zip_exception_01() + #test_zip_exception_02() + #test_zip_exception_03() + #test_zip_exception_04() + #test_zip_exception_05() + #test_zip_exception_06() diff --git a/tests/ut/python/log b/tests/ut/python/log new file mode 100644 index 0000000000..336fe112b3 --- /dev/null +++ b/tests/ut/python/log @@ -0,0 +1,2770 @@ +BUILD_PATH = /home/anzhengqi/workspace/mindspore/tests/ut/python/../../../build +export PYTHONPATH=/home/anzhengqi/workspace/mindspore:/home/anzhengqi/workspace/mindspore/tests/ut/python/../../..:/home/anzhengqi/workspace/mindspore/tests/ut/python/../../../tests/ut/cpp/python_input:/home/anzhengqi/workspace/mindspore/tests/ut/python/../../../tests/ut/python +[INFO] ME(31040,python):2020-05-11-11:13:20.087.883 [mindspore/ccsrc/parallel/costmodel_context.cc:30] GetInstance] Create costmodel_context +[INFO] ME(31040,python):2020-05-11-11:13:20.321.343 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is ApplyMomentum, outindex is 0, in_index is 0 +[INFO] ME(31040,python):2020-05-11-11:13:20.321.975 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is Adam, outindex is 0, in_index is 0 +[INFO] ME(31040,python):2020-05-11-11:13:20.321.985 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is Adam, outindex is 1, in_index is 1 
+[INFO] ME(31040,python):2020-05-11-11:13:20.321.989 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is Adam, outindex is 2, in_index is 2 +[INFO] ME(31040,python):2020-05-11-11:13:20.322.719 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is AssignAdd, outindex is 0, in_index is 0 +[INFO] ME(31040,python):2020-05-11-11:13:20.336.425 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is BNTrainingUpdate, outindex is 1, in_index is 5 +[INFO] ME(31040,python):2020-05-11-11:13:20.336.433 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is BNTrainingUpdate, outindex is 2, in_index is 6 +[INFO] ME(31040,python):2020-05-11-11:13:20.358.611 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is FusedMulApplyMomentum, outindex is 0, in_index is 0 +[INFO] ME(31040,python):2020-05-11-11:13:20.361.290 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is ScatterNdUpdate, outindex is 0, in_index is 0 +============================= test session starts ============================== +platform linux -- Python 3.7.5, pytest-5.4.1, py-1.8.1, pluggy-0.13.1 +rootdir: /home/anzhengqi/workspace/mindspore +collected 14 items +[INFO] ME(31040:140077051942720,MainProcess):2020-05-11-11:13:20.390.775 [mindspore/tests/ut/python/dataset/test_repeat.py:46] Test Simple Repeat +[INFO] ME(31040,python):2020-05-11-11:13:20.391.395 [mindspore/ccsrc/utils/context/ms_context.cc:105] set_backend_policy] ms set context backend policy:ms +[INFO] ME(31040,python):2020-05-11-11:13:20.391.503 [mindspore/ccsrc/dataset/util/arena.cc:41] Init] Size of memory pool is 262144, number of blocks of size is 64. +[INFO] ME(31040,python):2020-05-11-11:13:20.391.531 [mindspore/ccsrc/dataset/util/task_manager.cc:129] DoServiceStart] Starting Task Manager. +[INFO] ME(31040,python):2020-05-11-11:13:20.391.661 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Watchdog Thread ID 140076915136256 Started. +[WARNING] ME(31040,python):2020-05-11-11:13:20.391.839 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:122] Build] TFReader operator parallelism reduced to 1 workers. +[INFO] ME(31040,python):2020-05-11-11:13:20.391.867 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.391.905 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.391.965 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.391.970 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.391.983 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.392.025 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.392.029 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. 
+[INFO] ME(31040,python):2020-05-11-11:13:20.392.045 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: 2] + +- ( 1) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 2 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Rows per buffer: 1 +Total rows: 12 +Device id: 0 +Number of devices: 1 +Shuffle files: no +Dataset files list: + +Data Schema: +Dataset type string : (none specified) + Name : col_1d + Type : int64 + Rank : 1 + Shape : (<2>) + + Name : col_2d + Type : int64 + Rank : 2 + Shape : (<2,2>) + + Name : col_3d + Type : int64 + Rank : 3 + Shape : (<2,2,2>) + + Name : col_binary + Type : uint8 + Rank : 1 + Shape : (<1>) + + Name : col_float + Type : float32 + Rank : 1 + Shape : (<1>) + + Name : col_sint16 + Type : int16 + Rank : 1 + Shape : (<1>) + + Name : col_sint32 + Type : int32 + Rank : 1 + Shape : (<1>) + + Name : col_sint64 + Type : int64 + Rank : 1 + Shape : (<1>) + +Dataset type: 0 + + + +[INFO] ME(31040,python):2020-05-11-11:13:20.392.147 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:1 Thread ID 140076923528960 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.392.255 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140076931921664 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.392.373 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140076940314368 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.392.719 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.392.736 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.393.403 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.393.412 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.393.670 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.394.333 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:20.394.343 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:20.394.347 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. 
+[INFO] ME(31040,python):2020-05-11-11:13:20.394.364 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:1 Thread ID 140076923528960 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:20.394.371 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140076931921664 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:20.394.375 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140076940314368 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:20.394.392 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 13. +[INFO] ME(31040:140077051942720,MainProcess):2020-05-11-11:13:20.394.409 [mindspore/tests/ut/python/dataset/util.py:91] Number of data in data1: 24 + +dataset/test_repeat.py .[INFO] ME(31040:140077051942720,MainProcess):2020-05-11-11:13:20.398.278 [mindspore/tests/ut/python/dataset/test_repeat.py:63] Test Infinite Repeat +[WARNING] ME(31040,python):2020-05-11-11:13:20.398.837 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:122] Build] TFReader operator parallelism reduced to 1 workers. +[INFO] ME(31040,python):2020-05-11-11:13:20.398.865 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.398.903 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.398.961 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.398.966 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.398.978 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.399.012 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.399.016 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. 
+[INFO] ME(31040,python):2020-05-11-11:13:20.399.030 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: -1] + +- ( 1) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: -1 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Rows per buffer: 1 +Total rows: 12 +Device id: 0 +Number of devices: 1 +Shuffle files: no +Dataset files list: + +Data Schema: +Dataset type string : (none specified) + Name : col_1d + Type : int64 + Rank : 1 + Shape : (<2>) + + Name : col_2d + Type : int64 + Rank : 2 + Shape : (<2,2>) + + Name : col_3d + Type : int64 + Rank : 3 + Shape : (<2,2,2>) + + Name : col_binary + Type : uint8 + Rank : 1 + Shape : (<1>) + + Name : col_float + Type : float32 + Rank : 1 + Shape : (<1>) + + Name : col_sint16 + Type : int16 + Rank : 1 + Shape : (<1>) + + Name : col_sint32 + Type : int32 + Rank : 1 + Shape : (<1>) + + Name : col_sint64 + Type : int64 + Rank : 1 + Shape : (<1>) + +Dataset type: 0 + + + +[INFO] ME(31040,python):2020-05-11-11:13:20.399.123 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:1 Thread ID 140076940314368 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.399.218 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140076931921664 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.399.314 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140076923528960 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.399.652 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.399.709 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.400.269 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.400.277 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.400.559 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.400.575 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.401.060 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:20.401.066 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.401.320 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. 
+[INFO] ME(31040,python):2020-05-11-11:13:20.401.334 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.401.820 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:20.401.827 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.402.112 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.402.124 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.402.670 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 4. +[INFO] ME(31040,python):2020-05-11-11:13:20.402.679 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.402.960 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.402.972 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.403.498 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 5. +[INFO] ME(31040,python):2020-05-11-11:13:20.403.505 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.403.796 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.403.812 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.404.331 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 6. +[INFO] ME(31040,python):2020-05-11-11:13:20.404.338 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.404.631 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.404.643 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.405.118 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 7. +[INFO] ME(31040,python):2020-05-11-11:13:20.405.124 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.405.417 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. 
+[INFO] ME(31040,python):2020-05-11-11:13:20.405.428 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.405.920 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 8. +[INFO] ME(31040,python):2020-05-11-11:13:20.405.927 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.406.239 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/testTFTestAllTypes/test.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.406.251 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.407.601 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:1 Thread ID 140076940314368 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:20.407.629 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140076931921664 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:20.407.634 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140076923528960 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:20.407.658 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 13. + +test_tf_repeat_01 --- passed +.[INFO] ME(31040,python):2020-05-11-11:13:20.408.494 [mindspore/ccsrc/dataset/kernels/image/decode_op.cc:27] DecodeOp] Decode colour mode is RGB. +[WARNING] ME(31040:140077051942720,MainProcess):2020-05-11-11:13:20.408.651 [mindspore/dataset/engine/datasets.py:1039] Repeat is located before batch, data from two epochs can be batched together. +[WARNING] ME(31040,python):2020-05-11-11:13:20.408.986 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:122] Build] TFReader operator parallelism reduced to 1 workers. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.010 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.047 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.110 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 4. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.122 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.161 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 3. Producer: 4. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.173 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 4 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.264 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 2. Producer: 4. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.275 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 4 producers and 1 consumers. 
+[INFO] ME(31040,python):2020-05-11-11:13:20.409.365 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 4. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.369 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.373 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 4. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.384 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 4 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.485 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 4] [batch size: 32] + +- ( 1) : [workers: 0 (inlined)] [repeats: 22] + +- ( 2) : [workers: 4] + +- ( 3) : [workers: 4] + +- ( 4) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 16 +Operator control flags : 0x00000000 +Num workers: 4 +Start batch size: 32 +Drop remainder: yes + +( 1) : +Number of children : 1 + Child[0] id: 2 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 22 +Leaf Nodes in execution path: + Operator: 4 + +( 2) : +Number of children : 1 + Child[0] id: 3 +Number of parents : 1 + Parent[0] id: 1 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 4 +Input column names: image + TensorOps: 0x55ba72498180 + +( 3) : +Number of children : 1 + Child[0] id: 4 +Number of parents : 1 + Parent[0] id: 2 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 4 +Input column names: image + TensorOps: 0x55ba725a3320 + +( 4) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 3 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Rows per buffer: 1 +Total rows: 3 +Device id: 0 +Number of devices: 1 +Shuffle files: no +Dataset files list: + +Data Schema: +Dataset type string : (none specified) + Name : image + Type : uint8 + Rank : 1 + Shape : (no shape provided) + + Name : label + Type : uint64 + Rank : 1 + Shape : (no shape provided) + +Dataset type: 0 + + + +[INFO] ME(31040,python):2020-05-11-11:13:20.409.559 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:4 Thread ID 140076923528960 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.651 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140076931921664 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.749 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140076940314368 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.409.863 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:3 Thread ID 140076361406208 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.410.054 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140076353013504 Started. 
+[INFO] ME(31040,python):2020-05-11-11:13:20.410.152 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140076344620800 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.410.228 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075862062848 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.410.324 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075853670144 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.410.432 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.410.456 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.410.491 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:2 Thread ID 140075845277440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.410.719 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075809449728 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.410.836 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075538757376 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.410.912 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075459409664 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.411.038 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075451016960 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.411.154 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:0 Thread ID 140075442624256 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.411.244 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075434231552 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.413.072 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075425838848 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.413.153 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075417446144 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.413.229 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075409053440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:20.462.220 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:20.462.240 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.462.573 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.462.596 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.510.953 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:20.510.978 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. 
+[INFO] ME(31040,python):2020-05-11-11:13:20.511.351 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.511.374 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.553.766 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:20.553.789 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.554.142 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.554.165 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.597.017 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 4. +[INFO] ME(31040,python):2020-05-11-11:13:20.597.040 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.597.474 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.597.495 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.640.116 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 5. +[INFO] ME(31040,python):2020-05-11-11:13:20.640.141 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.640.473 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.640.494 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.683.221 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 6. +[INFO] ME(31040,python):2020-05-11-11:13:20.683.244 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.683.604 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.683.680 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.726.268 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 7. +[INFO] ME(31040,python):2020-05-11-11:13:20.726.292 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. 
+[INFO] ME(31040,python):2020-05-11-11:13:20.726.648 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.726.692 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.769.383 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 8. +[INFO] ME(31040,python):2020-05-11-11:13:20.769.407 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.769.787 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.769.830 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.812.822 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 9. +[INFO] ME(31040,python):2020-05-11-11:13:20.812.848 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.813.222 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.813.315 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.855.942 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 10. +[INFO] ME(31040,python):2020-05-11-11:13:20.855.968 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.856.444 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.856.469 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.899.053 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 11. +[INFO] ME(31040,python):2020-05-11-11:13:20.899.076 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.899.456 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.899.554 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.942.076 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 12. +[INFO] ME(31040,python):2020-05-11-11:13:20.942.101 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. 
+[INFO] ME(31040,python):2020-05-11-11:13:20.942.481 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.942.502 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.985.122 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 13. +[INFO] ME(31040,python):2020-05-11-11:13:20.985.180 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:20.985.540 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:20.985.561 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.028.821 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 14. +[INFO] ME(31040,python):2020-05-11-11:13:21.028.845 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.029.297 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:21.029.349 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.071.927 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 15. +[INFO] ME(31040,python):2020-05-11-11:13:21.071.953 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.072.441 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:21.072.465 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.115.420 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 16. +[INFO] ME(31040,python):2020-05-11-11:13:21.115.446 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.115.772 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:21.115.806 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.158.335 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 17. +[INFO] ME(31040,python):2020-05-11-11:13:21.158.362 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.158.648 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:21.158.689 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.202.058 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 18. +[INFO] ME(31040,python):2020-05-11-11:13:21.202.077 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.202.412 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:21.202.468 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.257.951 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 19. +[INFO] ME(31040,python):2020-05-11-11:13:21.257.977 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.258.345 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:21.258.366 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.301.090 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 20. +[INFO] ME(31040,python):2020-05-11-11:13:21.301.113 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.301.455 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:21.301.476 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.344.206 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 21. +[INFO] ME(31040,python):2020-05-11-11:13:21.344.233 [mindspore/ccsrc/dataset/engine/connector.h:141] Reset] Connector counters reset. +[INFO] ME(31040,python):2020-05-11-11:13:21.344.607 [mindspore/ccsrc/dataset/engine/datasetops/source/tf_reader_op.cc:353] WorkerEntry] TFReader operator worker 0 loaded file ../data/dataset/test_tf_file_3_images/train-0000-of-0001.data. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.182 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 22. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.204 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:187] FetchNextTensorRow] Child iterator picked up EOE. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.387.215 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.222 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:193] FetchNextTensorRow] Child iterator picked up EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.295 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.364 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:4 Thread ID 140076923528960 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.373 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140076931921664 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.378 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140076940314368 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.383 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:3 Thread ID 140076361406208 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.426 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140076353013504 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.510 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140076344620800 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.528 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140075862062848 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.540 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140075853670144 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.550 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:2 Thread ID 140075845277440 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.559 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140075809449728 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.568 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140075538757376 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.576 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140075459409664 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.586 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140075451016960 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.595 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:0 Thread ID 140075442624256 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.611 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140075434231552 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.621 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140075425838848 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.630 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140075417446144 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.639 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Parallel Op Worker Thread ID 140075409053440 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.387.696 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 94. 
+[INFO] ME(31040:140077051942720,MainProcess):2020-05-11-11:13:21.387.732 [mindspore/tests/ut/python/dataset/test_repeat.py:95] Number of tf data in data1: 2 + +test_tf_repeat_02 --- passed +.[INFO] ME(31040,python):2020-05-11-11:13:21.388.871 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.388.907 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.388.913 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 2. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.388.925 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:21.388.957 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.388.961 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.388.964 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.388.968 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.388.971 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.388.981 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: 3] + +- ( 1) : [workers: 0 (inlined)] [repeats: 2] + +- ( 2) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 3 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 1 + Child[0] id: 2 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 0 +Operator control flags : 0x00000001 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 2 +Leaf Nodes in execution path: + Operator: 2 + +( 2) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 1 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.389.067 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:2 Thread ID 140075409053440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.295 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.393 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.389.482 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.553 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.560 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.656 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.692 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.796 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.831 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.837 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.941 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.389.975 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.017 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.023 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.026 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.054 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.060 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.063 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.067 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.070 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.080 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:2 Thread ID 140075409053440 Stopped. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.390.091 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.194 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.224 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.229 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 2. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.241 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.273 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.277 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.280 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.284 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.287 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.296 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: 3] + +- ( 1) : [workers: 0 (inlined)] [repeats: 2] + +- ( 2) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 3 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 1 + Child[0] id: 2 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 0 +Operator control flags : 0x00000001 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 2 +Leaf Nodes in execution path: + Operator: 2 + +( 2) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 1 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.390.366 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:2 Thread ID 140075409053440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.442 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.502 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.390.592 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.642 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.648 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.734 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.782 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.883 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.931 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.390.936 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.033 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.064 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.107 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.114 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.117 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.141 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.146 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.150 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.154 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.157 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.167 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:2 Thread ID 140075409053440 Stopped. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.391.178 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. + +test_tf_repeat_03 --- passed +.[INFO] ME(31040,python):2020-05-11-11:13:21.391.929 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.955 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.391.968 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.003 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 0 +Number of parents : 0 +Connector queue size : 16 +Operator control flags : 0x00000000 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.392.041 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:0 Thread ID 140075409053440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.104 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.114 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.117 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.157 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.171 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:0 Thread ID 140075409053440 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.182 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.221 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.242 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.254 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.392.289 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 0 +Number of parents : 0 +Connector queue size : 16 +Operator control flags : 0x00000000 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.392.321 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:0 Thread ID 140075409053440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.377 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.383 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.386 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.420 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.434 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:0 Thread ID 140075409053440 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.392.445 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. + +test_nested_repeat1 --- passed +.[INFO] ME(31040,python):2020-05-11-11:13:21.393.197 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.224 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.230 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.242 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.275 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.282 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.393.290 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: 2] + +- ( 1) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 2 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.393.324 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:1 Thread ID 140075409053440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.378 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.432 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.474 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.479 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.483 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.525 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.532 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.536 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.546 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:1 Thread ID 140075409053440 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.557 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.628 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.652 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.657 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.668 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.393.701 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.705 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.715 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: 2] + +- ( 1) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 2 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.393.749 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:1 Thread ID 140075409053440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.807 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.851 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.893 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.899 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.902 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.930 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.936 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.939 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.950 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:1 Thread ID 140075409053440 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.393.961 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. + +test_nested_repeat2 --- passed +.[INFO] ME(31040,python):2020-05-11-11:13:21.394.720 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.394.747 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.394.752 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.394.764 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:21.394.797 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.394.801 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.394.808 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: 2] + +- ( 1) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 2 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.394.844 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:1 Thread ID 140075409053440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.394.968 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.033 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.123 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.132 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.136 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.162 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.168 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.186 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.195 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:1 Thread ID 140075409053440 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.206 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.395.275 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.298 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.303 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.314 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.346 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.350 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.358 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: 2] + +- ( 1) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 2 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.395.424 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:1 Thread ID 140075409053440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.498 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.554 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.643 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.653 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.657 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.678 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.684 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.688 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.395.698 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:1 Thread ID 140075409053440 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.395.709 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. + +test_nested_repeat3 --- passed +.[INFO] ME(31040,python):2020-05-11-11:13:21.396.652 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.678 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 3. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.692 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.725 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.730 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 2. Producer: 4. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.740 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 4 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.832 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.836 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 4. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.840 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.844 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 4. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.850 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.396.865 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: 3] + +- ( 1) : [workers: 0 (inlined)] [repeats: 2] + +- ( 2) : [workers: 4] [batch size: 3] + +- ( 3) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 3 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 1 + Child[0] id: 2 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 0 +Operator control flags : 0x00000001 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 2 +Leaf Nodes in execution path: + Operator: 3 + +( 2) : +Number of children : 1 + Child[0] id: 3 +Number of parents : 1 + Parent[0] id: 1 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 4 +Start batch size: 3 +Drop remainder: no + +( 3) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 2 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.396.901 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:3 Thread ID 140075409053440 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.396.956 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:2 Thread ID 140075417446144 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.040 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075425838848 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.095 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140075434231552 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.149 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140076940470016 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.203 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Parallel Op Worker Thread ID 140076932077312 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.367 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.383 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:187] FetchNextTensorRow] Child iterator picked up EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.479 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.532 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.544 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:187] FetchNextTensorRow] Child iterator picked up EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.594 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.397.600 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.688 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.704 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:187] FetchNextTensorRow] Child iterator picked up EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.799 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.861 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.875 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:187] FetchNextTensorRow] Child iterator picked up EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.895 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.397.915 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.002 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.014 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:187] FetchNextTensorRow] Child iterator picked up EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.092 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.180 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.191 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.193 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:187] FetchNextTensorRow] Child iterator picked up EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.195 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.203 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:193] FetchNextTensorRow] Child iterator picked up EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.211 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.221 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.398.230 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.413.548 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: 3] + +- ( 1) : [workers: 0 (inlined)] [repeats: -1] + +- ( 2) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 3 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 1 + Child[0] id: 2 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 0 +Operator control flags : 0x00000001 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: -1 +Leaf Nodes in execution path: + Operator: 2 + +( 2) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 1 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.413.609 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:2 Thread ID 140076361406208 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.413.682 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.413.737 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.413.818 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.413.858 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.413.933 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.413.971 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.414.050 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.414.179 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:2 Thread ID 140076361406208 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.414.194 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. + +test_nested_repeat8 --- passed +.[INFO] ME(31040,python):2020-05-11-11:13:21.414.977 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.004 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.009 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 2. Producer: 1. Consumer: 1. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.415.022 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.055 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.060 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.063 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.067 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.070 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.079 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: -1] + +- ( 1) : [workers: 0 (inlined)] [repeats: 3] + +- ( 2) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: -1 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 1 + Child[0] id: 2 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 0 +Operator control flags : 0x00000001 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 3 +Leaf Nodes in execution path: + Operator: 2 + +( 2) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 1 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.415.112 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:2 Thread ID 140076361406208 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.168 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.222 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.270 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.310 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.353 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.391 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.415.397 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.436 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.476 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:2 Thread ID 140076361406208 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.415.490 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. + +test_nested_repeat9 --- passed +.[INFO] ME(31040,python):2020-05-11-11:13:21.416.696 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.739 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.748 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 4. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.768 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.815 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.823 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 3. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.829 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.835 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.841 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 2. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.846 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.851 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.858 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.863 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.869 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.416.878 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0. +[INFO] ME(31040,python):2020-05-11-11:13:21.416.899 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks: +Execution tree summary: +----------------------- ++- ( 0) : [workers: 0 (inlined)] [repeats: 5] + +- ( 1) : [workers: 0 (inlined)] [repeats: 4] + +- ( 2) : [workers: 0 (inlined)] [repeats: 3] + +- ( 3) : [workers: 0 (inlined)] [repeats: 2] + +- ( 4) : [workers: 1] + +Execution tree operator details: +-------------------------------- +( 0) : +Number of children : 1 + Child[0] id: 1 +Number of parents : 0 +Connector queue size : 0 +Operator control flags : 0x00000000 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 5 +Leaf Nodes in execution path: + Operator: 1 + +( 1) : +Number of children : 1 + Child[0] id: 2 +Number of parents : 1 + Parent[0] id: 0 +Connector queue size : 0 +Operator control flags : 0x00000001 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 4 +Leaf Nodes in execution path: + Operator: 2 + +( 2) : +Number of children : 1 + Child[0] id: 3 +Number of parents : 1 + Parent[0] id: 1 +Connector queue size : 0 +Operator control flags : 0x00000001 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 3 +Leaf Nodes in execution path: + Operator: 3 + +( 3) : +Number of children : 1 + Child[0] id: 4 +Number of parents : 1 + Parent[0] id: 2 +Connector queue size : 0 +Operator control flags : 0x00000001 +Num workers: 0 (inlined) +Current repeat count: 0 +Max repeat count: 2 +Leaf Nodes in execution path: + Operator: 4 + +( 4) : +Number of children : 0 +Number of parents : 1 + Parent[0] id: 3 +Connector queue size : 16 +Operator control flags : 0x00000001 +Num workers: 1 +Column names: + + data + + +[INFO] ME(31040,python):2020-05-11-11:13:21.416.950 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:4 Thread ID 140076361406208 Started. +[INFO] ME(31040,python):2020-05-11-11:13:21.417.014 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.417.104 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.417.150 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.417.228 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.417.239 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.417.314 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.417.362 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. 
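The tree dumps above come from the nested-repeat unit tests: each one is a chain of repeat operators over a one-worker generator leaf. As a rough, hedged sketch only (this is not the patch's actual test code; the generator, column name, and row count below are illustrative, and batching is omitted), such a pipeline can be built by chaining repeat() calls on a GeneratorDataset, and the repeat counts multiply:

    import numpy as np
    import mindspore.dataset as ds

    def gen():
        # tiny in-memory source with 3 rows and a single "data" column (illustrative)
        for i in range(3):
            yield (np.array([i], dtype=np.int64),)

    data = ds.GeneratorDataset(gen, ["data"])
    data = data.repeat(2)   # inner repeat, analogous to operator ( 1) above
    data = data.repeat(3)   # outer repeat, analogous to operator ( 0) above

    # nested repeats multiply: 3 source rows * 2 * 3 = 18 rows per traversal
    rows = sum(1 for _ in data.create_tuple_iterator())
    print(rows)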
+[INFO] ME(31040,python):2020-05-11-11:13:21.425.093 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.128 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.166 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.203 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.240 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.245 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.281 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.318 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.355 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.392 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.398 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.401 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 4. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.404 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 4. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.440 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.477 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.522 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.561 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.567 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.425.604 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.641 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.678 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.716 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.721 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.756 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.793 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.831 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.867 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.873 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.876 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.912 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.949 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.425.986 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.023 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.028 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.064 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.101 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.138 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.426.175 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.181 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.216 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.253 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.290 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.327 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.332 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.336 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.371 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.408 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.445 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.482 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.487 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.524 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.561 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.598 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.635 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.640 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.426.676 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.713 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.750 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.786 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.792 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.795 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.830 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.867 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.904 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.941 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.946 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.426.982 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.427.019 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.427.057 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.427.093 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.427.101 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.427.137 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.427.175 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.427.212 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.217 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.221 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.249 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.255 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.258 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 4.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.261 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 5.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.265 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.269 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.272 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.275 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.278 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.290 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:4 Thread ID 140076361406208 Stopped.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.305 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.457 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:35] SanityCheck] Generator operator sanity check, prefetch size is 0.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.488 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.493 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 4. Producer: 1. Consumer: 1.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.506 [mindspore/ccsrc/dataset/engine/connector.h:76] Connector] A connector is created with 1 producers and 1 consumers.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.539 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.546 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 3. Producer: 1. Consumer: 1.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.549 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 3.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.553 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.556 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 2. Producer: 1. Consumer: 1.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.559 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 2.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.563 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:170] num_consumers] Repeat operator, no parent node, assuming it's root and returning 1.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.566 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 1. Producer: 1. Consumer: 1.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.569 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 1.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.573 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:80] CreateConnector] Creating connector in tree operator: 0. Producer: 1. Consumer: 1.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.576 [mindspore/ccsrc/dataset/engine/datasetops/dataset_op.cc:88] CreateConnector] Bypassed connector creation for tree operator: 0.
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.587 [mindspore/ccsrc/dataset/engine/execution_tree.cc:121] Launch] Printing the tree before launch tasks:
+Execution tree summary:
+-----------------------
++- ( 0) : [workers: 0 (inlined)] [repeats: 5]
+  +- ( 1) : [workers: 0 (inlined)] [repeats: 4]
+    +- ( 2) : [workers: 0 (inlined)] [repeats: 3]
+      +- ( 3) : [workers: 0 (inlined)] [repeats: 2]
+        +- ( 4) : [workers: 1]
+
+Execution tree operator details:
+--------------------------------
+( 0) :
+Number of children : 1
+  Child[0] id: 1
+Number of parents : 0
+Connector queue size : 0
+Operator control flags : 0x00000000
+Num workers: 0 (inlined)
+Current repeat count: 0
+Max repeat count: 5
+Leaf Nodes in execution path:
+  Operator: 1
+
+( 1) :
+Number of children : 1
+  Child[0] id: 2
+Number of parents : 1
+  Parent[0] id: 0
+Connector queue size : 0
+Operator control flags : 0x00000001
+Num workers: 0 (inlined)
+Current repeat count: 0
+Max repeat count: 4
+Leaf Nodes in execution path:
+  Operator: 2
+
+( 2) :
+Number of children : 1
+  Child[0] id: 3
+Number of parents : 1
+  Parent[0] id: 1
+Connector queue size : 0
+Operator control flags : 0x00000001
+Num workers: 0 (inlined)
+Current repeat count: 0
+Max repeat count: 3
+Leaf Nodes in execution path:
+  Operator: 3
+
+( 3) :
+Number of children : 1
+  Child[0] id: 4
+Number of parents : 1
+  Parent[0] id: 2
+Connector queue size : 0
+Operator control flags : 0x00000001
+Num workers: 0 (inlined)
+Current repeat count: 0
+Max repeat count: 2
+Leaf Nodes in execution path:
+  Operator: 4
+
+( 4) :
+Number of children : 0
+Number of parents : 1
+  Parent[0] id: 3
+Connector queue size : 16
+Operator control flags : 0x00000001
+Num workers: 1
+Column names:
+
+  data
+
+
+[INFO] ME(31040,python):2020-05-11-11:13:21.427.627 [mindspore/ccsrc/dataset/util/task.cc:30] operator()] Op launched, OperatorId:4 Thread ID 140076361406208 Started.
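Note (illustrative, not part of the patch): the execution tree printed above, four nested repeat operators over a single-worker generator that produces a "data" column, is the shape of tree produced by a chained-repeat pipeline. The sketch below is a minimal Python approximation of such a pipeline; the generator body, row count, and repeat counts are assumptions chosen only to mirror the dump, not code from this change set.
import numpy as np
import mindspore.dataset as ds

# Hypothetical 3-row generator; the column name matches the tree dump above.
def gen():
    for i in range(3):
        yield (np.array([i]),)

data = ds.GeneratorDataset(gen, column_names=["data"])
# Each repeat() wraps the previous dataset, so the last one applied becomes
# the root of the execution tree: repeat counts 5, 4, 3, 2 from root to leaf.
data = data.repeat(2).repeat(3).repeat(4).repeat(5)

for _ in data.create_tuple_iterator():
    pass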
+[INFO] ME(31040,python):2020-05-11-11:13:21.433.319 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.325 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.360 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.394 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.431 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.465 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.470 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.473 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.516 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.552 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.591 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.625 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.630 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.666 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.700 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.737 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.770 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.776 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.433.814 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.848 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.886 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.918 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.924 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.927 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.963 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.433.996 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.034 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.067 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.072 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.108 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.142 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.179 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.212 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.218 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.253 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.287 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.325 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.434.359 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.364 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.367 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.405 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.440 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.478 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.512 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.517 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.553 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.587 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.624 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.657 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.662 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.698 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.731 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.768 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.801 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.807 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.434.810 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 4. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.813 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 4. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.848 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.882 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.920 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.954 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.959 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.434.997 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.032 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.069 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.103 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.108 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.144 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.177 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.215 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.248 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.253 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.257 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.435.292 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.326 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.363 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.397 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.402 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.438 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.471 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.508 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.541 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.547 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.585 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.618 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.656 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.689 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.694 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.698 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.733 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.766 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.804 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.435.837 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.842 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.878 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.912 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.949 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.982 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.435.987 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.023 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.057 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.094 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.128 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.133 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.137 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.175 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.210 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.247 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.281 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.287 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.436.322 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.356 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.393 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.426 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.431 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.467 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.501 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 1. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.538 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:224] operator()] Generator operator sends out EOE. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.544 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:229] operator()] Generator operator sends out EOF. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.547 [mindspore/ccsrc/dataset/engine/datasetops/source/generator_op.cc:232] operator()] Generator operator main execution loop complete. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.572 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 2. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.577 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 3. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.581 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 4. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.584 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:132] EoeReceived] Repeat operator end of epoch message received. Repeat count is now: 5. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.587 [mindspore/ccsrc/dataset/engine/dataset_iterator.cc:100] FetchNextTensorRow] End of data iteration. Fetch eof and then return empty row. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.591 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.594 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.597 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.603 [mindspore/ccsrc/dataset/engine/datasetops/repeat_op.cc:164] EofReceived] Repeat operator EOF received, do nothing now. 
+[INFO] ME(31040,python):2020-05-11-11:13:21.436.614 [mindspore/ccsrc/dataset/util/task.cc:117] Join] Op launched, OperatorId:4 Thread ID 140076361406208 Stopped. +[INFO] ME(31040,python):2020-05-11-11:13:21.436.625 [mindspore/ccsrc/dataset/util/intrp_service.cc:27] ~IntrpService] Number of registered resources is 6. + +test_nested_repeat10 --- passed +. +test_nested_repeat11 --- passed + + +============================== 14 passed in 1.07s ============================== +[INFO] ME(31135,python):2020-05-11-11:13:21.722.210 [mindspore/ccsrc/parallel/costmodel_context.cc:30] GetInstance] Create costmodel_context +[INFO] ME(31135,python):2020-05-11-11:13:21.957.718 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is ApplyMomentum, outindex is 0, in_index is 0 +[INFO] ME(31135,python):2020-05-11-11:13:21.958.252 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is Adam, outindex is 0, in_index is 0 +[INFO] ME(31135,python):2020-05-11-11:13:21.958.263 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is Adam, outindex is 1, in_index is 1 +[INFO] ME(31135,python):2020-05-11-11:13:21.958.268 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is Adam, outindex is 2, in_index is 2 +[INFO] ME(31135,python):2020-05-11-11:13:21.958.989 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is AssignAdd, outindex is 0, in_index is 0 +[INFO] ME(31135,python):2020-05-11-11:13:21.973.276 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is BNTrainingUpdate, outindex is 1, in_index is 5 +[INFO] ME(31135,python):2020-05-11-11:13:21.973.292 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is BNTrainingUpdate, outindex is 2, in_index is 6 +[INFO] ME(31135,python):2020-05-11-11:13:21.995.917 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is FusedMulApplyMomentum, outindex is 0, in_index is 0 +[INFO] ME(31135,python):2020-05-11-11:13:21.998.905 [mindspore/ccsrc/kernel/oplib/oplib.cc:288] GetRefInfo] add ref info, op name is ScatterNdUpdate, outindex is 0, in_index is 0 +ERROR: usage: pytest [options] [file_or_dir] [file_or_dir] [...] 
+pytest: error: unrecognized arguments: -n --dist=loadfile dataset/test_repeat.py/parallel dataset/test_repeat.py/train + inifile: None + rootdir: /home/anzhengqi/workspace/mindspore + diff --git a/tests/ut/python/parallel/test_auto_parallel_resnet.py b/tests/ut/python/parallel/test_auto_parallel_resnet.py index fdba571e70..fccd4c190b 100644 --- a/tests/ut/python/parallel/test_auto_parallel_resnet.py +++ b/tests/ut/python/parallel/test_auto_parallel_resnet.py @@ -274,6 +274,9 @@ class DatasetLenet(): def get_repeat_count(self): return 1 + def create_tuple_iterator(self): + return self + def test_train_32k_8p(batch_size=32, num_classes=32768): dev_num = 8 diff --git a/tests/ut/python/parallel/test_bias_add.py b/tests/ut/python/parallel/test_bias_add.py index 321810b1ae..573efde125 100644 --- a/tests/ut/python/parallel/test_bias_add.py +++ b/tests/ut/python/parallel/test_bias_add.py @@ -61,6 +61,9 @@ class DatasetLenet(): def get_repeat_count(self): return 1 + def create_tuple_iterator(self): + return self + class Net(nn.Cell): def __init__(self): diff --git a/tests/ut/python/parallel/test_gather_v2_primitive.py b/tests/ut/python/parallel/test_gather_v2_primitive.py index 8aa093a24e..e6f269e2db 100644 --- a/tests/ut/python/parallel/test_gather_v2_primitive.py +++ b/tests/ut/python/parallel/test_gather_v2_primitive.py @@ -58,6 +58,9 @@ class Dataset(): def get_repeat_count(self): return 1 + def create_tuple_iterator(self): + return self + class GatherV2(_Loss): def __init__(self, index_dim, strategy, index_size=16): diff --git a/tests/ut/python/train/test_dataset_helper.py b/tests/ut/python/train/test_dataset_helper.py new file mode 100644 index 0000000000..6540adfe12 --- /dev/null +++ b/tests/ut/python/train/test_dataset_helper.py @@ -0,0 +1,107 @@ +# Copyright 2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ +"""test dataset helper.""" + +import pytest +import numpy as np +import mindspore.context as context +from mindspore.communication.management import init +from mindspore.train.dataset_helper import DatasetHelper +from ....dataset_mock import MindData + + +def get_dataset(batch_size=1): + dataset_types = (np.int32, np.int32, np.int32, np.int32, np.int32, np.int32, np.int32) + dataset_shapes = ((batch_size, 128), (batch_size, 128), (batch_size, 128), (batch_size, 1), + (batch_size, 20), (batch_size, 20), (batch_size, 20)) + + dataset = MindData(size=2, batch_size=batch_size, np_types=dataset_types, + output_shapes=dataset_shapes, input_indexs=(0, 1)) + return dataset + + +def test_dataset_helper_dataset_sink_mode_str(): + dataset = get_dataset(32) + with pytest.raises(TypeError): + DatasetHelper(dataset, dataset_sink_mode="True") + + +def test_dataset_helper_dataset_sink_mode_int(): + dataset = get_dataset(32) + with pytest.raises(TypeError): + DatasetHelper(dataset, dataset_sink_mode=1) + + +def test_dataset_helper_sink_size_bool(): + dataset = get_dataset(32) + with pytest.raises(TypeError): + DatasetHelper(dataset, dataset_sink_mode=True, sink_size=True) + + +def test_dataset_helper_sink_size_float(): + dataset = get_dataset(32) + with pytest.raises(TypeError): + DatasetHelper(dataset, dataset_sink_mode=True, sink_size=1.0) + + +def test_dataset_helper_sink_size_negative(): + dataset = get_dataset(32) + with pytest.raises(ValueError): + DatasetHelper(dataset, dataset_sink_mode=True, sink_size=-2) + + +def test_dataset_iter_normal(): + dataset = get_dataset(32) + dataset_helper = DatasetHelper(dataset, dataset_sink_mode=False) + count = 0 + for _ in range(2): + for _ in dataset_helper: + count += 1 + dataset.reset() + assert count == 6 + + +@pytest.mark.skipif('not context.get_context("enable_ge")') +def test_dataset_iter_ge(): + init() + dataset = get_dataset(32) + dataset_helper = DatasetHelper(dataset, dataset_sink_mode=True, sink_size=10) + count = 0 + for _ in range(2): + for _ in dataset_helper: + count += 1 + assert count == 2 + + +@pytest.mark.skipif('context.get_context("enable_ge")') +def test_dataset_iter_ms_loop_sink(): + init() + context.set_context(enable_loop_sink=True) + dataset = get_dataset(32) + dataset_helper = DatasetHelper(dataset, dataset_sink_mode=True, sink_size=10) + count = 0 + for _ in range(2): + for inputs in dataset_helper: + count += 1 + assert inputs == tuple() + assert count == 2 + + +@pytest.mark.skipif('context.get_context("enable_ge")') +def test_dataset_iter_ms(): + init() + context.set_context(enable_loop_sink=False) + dataset = get_dataset(32) + DatasetHelper(dataset, dataset_sink_mode=True, sink_size=10)
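
Note on the new tests: test_dataset_helper.py pins down the argument checks for DatasetHelper (dataset_sink_mode must be a bool; sink_size must be a non-bool int and may not be an arbitrary negative value) plus per-epoch iteration in non-sink mode, while the parallel-test mocks gain a create_tuple_iterator() returning self, presumably because the non-sink path now iterates the dataset through that method. The sketch below only illustrates the TypeError/ValueError contract those tests encode; it is not the implementation in mindspore/train/dataset_helper.py, and treating sink_size == -1 as "sink the whole dataset" is an assumption about the convention rather than something taken from this patch.

def check_dataset_helper_args(dataset_sink_mode=True, sink_size=-1):
    """Illustrative validation mirroring the unit tests above (not the real API)."""
    if not isinstance(dataset_sink_mode, bool):
        # Strings such as "True" and ints such as 1 must be rejected, per the tests.
        raise TypeError("dataset_sink_mode should be a bool, got {}".format(type(dataset_sink_mode)))
    if isinstance(sink_size, bool) or not isinstance(sink_size, int):
        # bool is a subclass of int in Python, so it is excluded explicitly;
        # floats such as 1.0 are also rejected.
        raise TypeError("sink_size should be an int, got {}".format(type(sink_size)))
    if sink_size < -1:
        # The test only requires that -2 fails; allowing -1 here assumes the usual
        # "sink everything" convention and is not asserted by this patch.
        raise ValueError("sink_size should be -1 or a positive int, got {}".format(sink_size))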