!44019 format minddata log

Merge pull request !44019 from luoyang/err_struct
commit 2d2bdb4c78
i-robot 2022-11-04 03:16:18 +00:00 committed by Gitee
84 changed files with 607 additions and 192 deletions

View File

@@ -118,8 +118,10 @@ class MS_API Status {
  inline std::string ToString() const;
  int GetLineOfCode() const;
+  inline std::string GetFileName() const;
  inline std::string GetErrDescription() const;
  inline std::string SetErrDescription(const std::string &err_description);
+  inline void SetStatusMsg(const std::string &status_msg);
  MS_API friend std::ostream &operator<<(std::ostream &os, const Status &s);
@@ -144,8 +146,10 @@ class MS_API Status {
  Status(enum StatusCode status_code, const std::vector<char> &status_msg);
  Status(enum StatusCode code, int line_of_code, const char *file_name, const std::vector<char> &extra);
  std::vector<char> ToCString() const;
+  std::vector<char> GetFileNameChar() const;
  std::vector<char> GetErrDescriptionChar() const;
  std::vector<char> SetErrDescription(const std::vector<char> &err_description);
+  void SetStatusMsgChar(const std::vector<char> &status_msg);
  static std::vector<char> CodeAsCString(enum StatusCode c);
  struct Data;
@@ -157,10 +161,12 @@ Status::Status(enum StatusCode status_code, const std::string &status_msg)
Status::Status(const enum StatusCode code, int line_of_code, const char *file_name, const std::string &extra)
    : Status(code, line_of_code, file_name, StringToChar(extra)) {}
std::string Status::ToString() const { return CharToString(ToCString()); }
+std::string Status::GetFileName() const { return CharToString(GetFileNameChar()); }
std::string Status::GetErrDescription() const { return CharToString(GetErrDescriptionChar()); }
std::string Status::SetErrDescription(const std::string &err_description) {
  return CharToString(SetErrDescription(StringToChar(err_description)));
}
+void Status::SetStatusMsg(const std::string &status_msg) { SetStatusMsgChar(StringToChar(status_msg)); }
std::string Status::CodeAsString(enum StatusCode c) { return CharToString(CodeAsCString(c)); }
}  // namespace mindspore
#endif  // MINDSPORE_INCLUDE_API_STATUS_H
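The two additions follow the header's existing split between inline std::string wrappers and std::vector<char> implementations, which keeps std::string out of the exported ABI. A minimal self-contained sketch of that pattern (toy class, not the MindSpore implementation; names mirror the diff):

    // Sketch of the ABI-stable accessor pattern used by Status above.
    #include <iostream>
    #include <string>
    #include <vector>

    static std::vector<char> StringToChar(const std::string &s) { return {s.begin(), s.end()}; }
    static std::string CharToString(const std::vector<char> &c) { return {c.begin(), c.end()}; }

    class Status {
     public:
      // Inline wrappers compiled into the caller; only std::vector<char>
      // crosses the shared-library boundary.
      std::string GetFileName() const { return CharToString(GetFileNameChar()); }
      void SetStatusMsg(const std::string &msg) { SetStatusMsgChar(StringToChar(msg)); }

     private:
      std::vector<char> GetFileNameChar() const { return StringToChar(file_name_); }
      void SetStatusMsgChar(const std::vector<char> &msg) { status_msg_ = CharToString(msg); }
      std::string file_name_ = "md_log_adapter.cc";
      std::string status_msg_;
    };

    int main() {
      Status s;
      s.SetStatusMsg("formatted message");
      std::cout << s.GetFileName() << std::endl;  // prints: md_log_adapter.cc
      return 0;
    }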

View File

@@ -76,6 +76,11 @@ else()
    endif()
endif()
+set(MINDDATA_LOG_ADAPTER_SRC ${CMAKE_SOURCE_DIR}/mindspore/ccsrc/minddata/dataset/util/md_log_adapter.cc)
+add_library(md_log_adapter_obj OBJECT ${MINDDATA_LOG_ADAPTER_SRC})
+add_library(md_log_adapter STATIC $<TARGET_OBJECTS:md_log_adapter_obj>)
+target_link_libraries(md_log_adapter mindspore_core)
+
################## Include sub-modules ###############################
add_subdirectory(util)
add_subdirectory(core)
@@ -200,6 +205,8 @@ endif()
set(dataengine_submodules ${dataengine_submodules} CACHE INTERNAL "_c_dataengine objects")
add_library(_c_dataengine SHARED ${dataengine_submodules})
+add_dependencies(_c_dataengine md_log_adapter)
+target_link_libraries(_c_dataengine PRIVATE md_log_adapter)
if(ENABLE_PYTHON)
    set_target_properties(_c_dataengine PROPERTIES
        PREFIX "${PYTHON_MODULE_PREFIX}"

View File

@@ -25,15 +25,16 @@
#include "pybind11/pybind11.h"
#include "pybind11/stl.h"
+#include "minddata/dataset/util/md_log_adapter.h"
namespace py = pybind11;
namespace mindspore {
namespace dataset {
#define THROW_IF_ERROR(s)                                                              \
  do {                                                                                 \
    Status rc = std::move(s);                                                          \
-    if (rc.IsError()) throw std::runtime_error(rc.ToString());                        \
+    if (rc.IsError()) throw std::runtime_error(MDLogAdapter::Apply(&rc).ToString());  \
  } while (false)
using PybindDefineFunc = std::function<void(py::module *)>;
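The macro change routes every error Status through MDLogAdapter::Apply before it becomes a Python-visible exception. As an aside, the do { ... } while (false) wrapper is what lets the macro behave as a single statement; a toy, self-contained illustration (simplified Status type, assumed names, not the MindSpore code):

    #include <stdexcept>
    #include <string>
    #include <utility>

    struct Status {
      bool ok = true;
      std::string msg;
      bool IsError() const { return !ok; }
      std::string ToString() const { return msg; }
    };

    #define THROW_IF_ERROR(s)                                       \
      do {                                                          \
        Status rc = std::move(s);                                   \
        if (rc.IsError()) throw std::runtime_error(rc.ToString());  \
      } while (false)

    Status Fail() { return {false, "boom"}; }

    int main() {
      try {
        // Expands to a single statement, so this unbraced if/else stays well-formed.
        if (true)
          THROW_IF_ERROR(Fail());
        else
          return 2;
      } catch (const std::runtime_error &e) {
        return 0;  // reached: the error Status became an exception
      }
      return 1;
    }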

View File

@@ -62,7 +62,7 @@ Status CpuMapJob::RebuildMapErrorMsg(const TensorRow &input_row, const size_t &i
  std::string abbr_op_name = op_name.substr(0, op_name.length() - 2);
  err_msg += "map operation: [" + abbr_op_name + "] failed. ";
  if (input_row.getPath().size() > 0 && !input_row.getPath()[0].empty()) {
-    err_msg += "The corresponding data files: " + input_row.getPath()[0];
+    err_msg += "The corresponding data file is: " + input_row.getPath()[0];
    if (input_row.getPath().size() > 1) {
      std::set<std::string> path_set;
      path_set.insert(input_row.getPath()[0]);
@@ -80,6 +80,9 @@ Status CpuMapJob::RebuildMapErrorMsg(const TensorRow &input_row, const size_t &i
    err_msg += "Error description:\n";
  }
  err_msg += tensor_err_msg;
+  if (abbr_op_name == "PyFunc") {
+    RETURN_STATUS_ERROR(StatusCode::kMDPyFuncException, err_msg);
+  }
  rc->SetErrDescription(err_msg);
  return *rc;
}

View File

@@ -201,12 +201,29 @@ Status GeneratorOp::operator()() {
        generator_counter_++;
      } catch (py::error_already_set &e) {
        eoe = e.matches(PyExc_StopIteration);
-        // Restore exception to python
-        e.restore();
        // Pop up non StopIteration Python Exception
        if (!eoe) {
-          RETURN_STATUS_ERROR(StatusCode::kMDPyFuncException, e.what());
+          std::string traceback;
+          try {
+            // Construct python-like traceback
+            py::list tb = py::module::import("traceback").attr("format_tb")(e.trace());
+            traceback = "Traceback (most recent call last):\n";
+            for (auto t : tb) {
+              traceback += py::reinterpret_borrow<py::str>(t);
+            }
+            traceback += e.what();
+          } catch (std::exception &) {
+            // Back to original exception
+            traceback = e.what();
+          }
+          // Restore exception to python
+          e.restore();
+          RETURN_STATUS_ERROR(StatusCode::kMDPyFuncException, traceback);
        }
+        // Restore exception to python
+        e.restore();
        if (num_rows_sampled != -1 && num_rows_sampled != generator_counter_) {
          if (generator_counter_ == 0) {
            std::string msg =
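A standalone sketch of the same traceback-reconstruction technique using pybind11's embedding API (assumes pybind11 >= 2.6 for error_already_set::trace() and the usual embed build flags; not the MindSpore build):

    #include <pybind11/embed.h>
    #include <iostream>
    #include <string>

    namespace py = pybind11;

    int main() {
      py::scoped_interpreter guard{};
      try {
        py::exec("def f():\n    raise ValueError('bad row')\nf()");
      } catch (py::error_already_set &e) {
        std::string traceback = "Traceback (most recent call last):\n";
        try {
          // traceback.format_tb returns a list of pre-formatted frame strings.
          py::list tb = py::module::import("traceback").attr("format_tb")(e.trace());
          for (auto t : tb) {
            traceback += py::reinterpret_borrow<py::str>(t);
          }
          traceback += e.what();
        } catch (std::exception &) {
          traceback = e.what();  // fall back to the bare message
        }
        std::cout << traceback << std::endl;
      }
      return 0;
    }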

View File

@@ -1,3 +1,4 @@
file(GLOB_RECURSE _CURRENT_SRC_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} "*.cc")
+list(REMOVE_ITEM _CURRENT_SRC_FILES "md_log_adapter.cc")
set_property(SOURCE ${_CURRENT_SRC_FILES} PROPERTY COMPILE_DEFINITIONS SUBMODULE_ID=mindspore::SubModuleId::SM_MD)
add_library(utils OBJECT ${_CURRENT_SRC_FILES})

View File

@@ -0,0 +1,78 @@
+/**
+ * Copyright 2022 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "minddata/dataset/util/md_log_adapter.h"
+#include "minddata/dataset/util/status.h"
+
+#include <sstream>
+
+namespace mindspore {
+namespace dataset {
+Status MDLogAdapter::Apply(Status *rc) {
+  std::string status_msg = ConstructMsg(rc->StatusCode(), rc->CodeAsString(rc->StatusCode()), "", rc->GetLineOfCode(),
+                                        rc->GetFileName(), rc->GetErrDescription());
+  rc->SetStatusMsg(status_msg);
+  return *rc;
+}
+
+std::string MDLogAdapter::ConstructMsg(const enum StatusCode &status_code, const std::string &code_as_string,
+                                       const std::string &status_msg, const int line_of_code,
+                                       const std::string &file_name, const std::string &err_description) {
+  std::ostringstream ss;
+  std::string kSplitLine = std::string(66, '-') + "\n";
+  std::string err_ori = err_description;
+
+  /// Python Runtime Error
+  ss << code_as_string << ". \n\n";
+
+  /// Python Stack
+  std::string user_err;
+  std::string user_stack;
+  if (status_code == StatusCode::kMDPyFuncException) {
+    std::string at_stack = "\n\nAt:\n";
+    if (err_ori.find(at_stack) != std::string::npos) {
+      user_stack = err_ori.substr(0, err_ori.find(at_stack));
+      user_err = "Execute user Python code failed, check 'Python Call Stack' above.";
+      ss << kSplitLine << "- Python Call Stack: \n" << kSplitLine;
+      ss << user_stack << "\n\n";
+    } else {
+      user_err = err_ori;
+    }
+  }
+
+  /// Summary Message
+  ss << kSplitLine << "- Dataset Pipeline Error Message: \n" << kSplitLine;
+  if (!user_err.empty()) {
+    ss << "[ERROR] " + user_err + "\n\n";
+  } else {
+    user_err = err_description;
+    if (*user_err.rbegin() != '.') {
+      user_err += '.';
+    }
+    ss << "[ERROR] " + user_err + "\n\n";
+  }
+
+  /// C++ Stack
+  if (!file_name.empty()) {
+    ss << kSplitLine << "- C++ Call Stack: (For framework developers) \n" << kSplitLine;
+    std::string cpp_trace = std::string(file_name) + "(" + std::to_string(line_of_code) + ").\n";
+    ss << cpp_trace << "\n\n";
+  }
+  return ss.str();
+}
+}  // namespace dataset
+}  // namespace mindspore
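For orientation, an illustrative sketch of the message layout that Apply/ConstructMsg produce for a PyFunc failure (all values invented, not captured output):

    Exception thrown from user defined Python function in dataset. 

    ------------------------------------------------------------------
    - Python Call Stack: 
    ------------------------------------------------------------------
    Traceback (most recent call last):
      File "pipeline.py", line 12, in exception_func
    ValueError: Error occur!

    ------------------------------------------------------------------
    - Dataset Pipeline Error Message: 
    ------------------------------------------------------------------
    [ERROR] Execute user Python code failed, check 'Python Call Stack' above.

    ------------------------------------------------------------------
    - C++ Call Stack: (For framework developers) 
    ------------------------------------------------------------------
    mindspore/ccsrc/minddata/dataset/engine/datasetops/map_op/map_op.cc(123).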

View File

@@ -0,0 +1,41 @@
+/**
+ * Copyright 2022 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MINDSPORE_CCSRC_MINDDATA_DATASET_MD_LOG_ADAPTER_H_
+#define MINDSPORE_CCSRC_MINDDATA_DATASET_MD_LOG_ADAPTER_H_
+
+#include <iostream>
+#include <string>
+#include <utility>
+
+#include "include/api/status.h"
+
+namespace mindspore {
+namespace dataset {
+class MDLogAdapter {
+ public:
+  MDLogAdapter() = default;
+
+  ~MDLogAdapter() = default;
+
+  static Status Apply(Status *rc);
+
+  static std::string ConstructMsg(const enum StatusCode &status_code, const std::string &code_as_string,
+                                  const std::string &status_msg, const int line_of_code, const std::string &file_name,
+                                  const std::string &err_description);
+};
+}  // namespace dataset
+}  // namespace mindspore
+#endif  // MINDSPORE_CCSRC_MINDDATA_DATASET_MD_LOG_ADAPTER_H_

View File

@@ -218,8 +218,10 @@ void TaskManager::InterruptMaster(const Status &rc) {
    master->caught_severe_exception_ = true;
    // Move log error here for some scenarios didn't call GetMasterThreadRc
    if (master->rc_.StatusCode() != mindspore::StatusCode::kMDPyFuncException) {
+#ifndef ENABLE_PYTHON
      // use python operation, the error had been raised in python layer. So disable log prompt here.
      MS_LOG(ERROR) << "Task is terminated with err msg (more details are in info level logs): " << master->rc_;
+#endif
    }
  }
}

View File

@@ -34,6 +34,7 @@ endif()
set_property(SOURCE ${DIR_LIB_SRCS} PROPERTY COMPILE_DEFINITIONS SUBMODULE_ID=mindspore::SubModuleId::SM_MD)
add_library(mindrecord_obj OBJECT ${DIR_LIB_SRCS})
add_library(_c_mindrecord SHARED $<TARGET_OBJECTS:mindrecord_obj>)
+add_dependencies(_c_mindrecord md_log_adapter)
set_target_properties(_c_mindrecord PROPERTIES
    PREFIX "${PYTHON_MODULE_PREFIX}"
@@ -48,6 +49,7 @@ else()
        mindspore::protobuf)
endif()
target_link_libraries(_c_mindrecord PRIVATE mindspore_core)
+target_link_libraries(_c_mindrecord PRIVATE md_log_adapter)
if(USE_GLOG)
    target_link_libraries(_c_mindrecord PRIVATE mindspore::glog)
else()

View File

@@ -18,6 +18,7 @@
#include <vector>
#include "utils/ms_utils.h"
+#include "minddata/dataset/util/md_log_adapter.h"
#include "minddata/mindrecord/include/common/log_adapter.h"
#include "minddata/mindrecord/include/common/shard_utils.h"
#include "minddata/mindrecord/include/shard_error.h"
@@ -30,13 +31,14 @@
#include "pybind11/stl.h"
namespace py = pybind11;
+using mindspore::dataset::MDLogAdapter;
namespace mindspore {
namespace mindrecord {
#define THROW_IF_ERROR(s)                                                              \
  do {                                                                                 \
    Status rc = std::move(s);                                                          \
-    if (rc.IsError()) throw std::runtime_error(rc.ToString());                        \
+    if (rc.IsError()) throw std::runtime_error(MDLogAdapter::Apply(&rc).ToString());  \
  } while (false)
void BindSchema(py::module *m) {

View File

@@ -26,6 +26,7 @@
#endif
#include <map>
#include <sstream>
+#include <iostream>
namespace mindspore {
struct Status::Data {
@@ -45,7 +46,7 @@ static std::map<enum StatusCode, std::string> status_info_map = {
  {kMDShapeMisMatch, "Shape is incorrect"},
  {kMDInterrupted, "Interrupted system call"},
  {kMDNoSpace, "No space left on device"},
-  {kMDPyFuncException, "Exception thrown from PyFunc"},
+  {kMDPyFuncException, "Exception thrown from user defined Python function in dataset"},
  {kMDDuplicateKey, "Duplicate key"},
  {kMDPythonInterpreterFailure, ""},
  {kMDTDTPushFailure, "Unexpected error"},
@@ -58,7 +59,7 @@ static std::map<enum StatusCode, std::string> status_info_map = {
  {kMDBuddySpaceFull, "BuddySpace full"},
  {kMDNetWorkError, "Network error"},
  {kMDNotImplementedYet, "Unexpected error"},
-  {kMDUnexpectedError, "Unexpected error"},
+  {kMDUnexpectedError, "Exception thrown from dataset pipeline. Refer to 'Dataset Pipeline Error Message'"},
  // ME
  {kMEFailed, "Common error code."},
  {kMEInvalidInput, "Invalid input."},
@@ -157,6 +158,13 @@ int Status::GetLineOfCode() const {
  return data_->line_of_code;
}

+std::vector<char> Status::GetFileNameChar() const {
+  if (data_ == nullptr) {
+    return std::vector<char>();
+  }
+  return StringToChar(data_->file_name);
+}
+
std::vector<char> Status::GetErrDescriptionChar() const {
  if (data_ == nullptr) {
    return std::vector<char>();
@@ -204,6 +212,13 @@ std::vector<char> Status::SetErrDescription(const std::vector<char> &err_descrip
  return StringToChar(data_->status_msg);
}

+void Status::SetStatusMsgChar(const std::vector<char> &status_msg) {
+  if (data_ == nullptr) {
+    return;
+  }
+  data_->status_msg = CharToString(status_msg);
+}
+
bool Status::operator==(const Status &other) const {
  if (data_ == nullptr && other.data_ == nullptr) {
    return true;

View File

@@ -1,2 +1,2 @@
Note: This is the mindspore Lite inference framework size threshold. Offline review is required before modify this value!!!
-1097000
+1100096

View File

@@ -1,2 +1,2 @@
Note: This is the mindspore Lite inference framework size threshold. Modifying this threshold requires meeting review.
-1097000
+1100096

View File

@@ -53,9 +53,10 @@ class ExceptionHandler:
    def reraise(self):
        """Reraise the caught exception in the main thread/process"""
-        # Error message like: "Caught ValueError in GeneratorDataset worker process. Original Traceback:".
-        err_msg = "Caught {} {}.\nOriginal {}".format(
-            self.except_type.__name__, self.where, self.except_msg)
+        # Find the last traceback which is more useful to user.
+        index = [i for i in range(len(self.except_msg)) if self.except_msg.startswith('Traceback', i)]
+        err_msg = "{}".format(self.except_msg[index[-1]:]).strip()

        if self.except_type == KeyError:
            # As KeyError will call its repr() function automatically, which makes stack info hard to read.
            err_msg = KeyErrorParse(err_msg)
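The new logic keeps only the substring starting at the last "Traceback" header, on the theory that the final traceback is the one pointing at user code. The same trimming idea in a self-contained C++ sketch (invented message text; the Python above collects startswith indices, which reduces to a last-occurrence search):

    #include <iostream>
    #include <string>

    int main() {
      // Two chained tracebacks; only the last one is useful to the user.
      std::string except_msg =
          "Traceback (most recent call last):\n  frame A\n"
          "Traceback (most recent call last):\n  frame B\nValueError: bad row\n";
      std::size_t pos = except_msg.rfind("Traceback");  // last occurrence
      std::string err_msg = (pos == std::string::npos) ? except_msg : except_msg.substr(pos);
      std::cout << err_msg;  // prints the final traceback block only
      return 0;
    }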

View File

@@ -760,7 +760,7 @@ def check_tensor_op(param, param_name):
def check_c_tensor_op(param, param_name):
    """check whether param is a tensor op or a callable Python function but not a py_transform"""
    if callable(param) and str(param).find("py_transform") >= 0:
-        raise TypeError("{0} is a py_transform op which is not allow to use.".format(param_name))
+        raise TypeError("{0} is a py_transform op which is not allowed to use.".format(param_name))
    if not isinstance(param, cde.TensorOp) and not callable(param) and not getattr(param, 'parse', None):
        raise TypeError("{0} is neither a c_transform op (TensorOperation) nor a callable pyfunc.".format(param_name))

View File

@@ -266,7 +266,7 @@ class FileWriter:
        if not isinstance(raw_data, list):
            raise ParamTypeError('raw_data', 'list')
        if self._flush and not self._append:
-            raise RuntimeError("Unexpected error. Not allow to call `write_raw_data` on flushed MindRecord files." \
+            raise RuntimeError("Not allowed to call `write_raw_data` on flushed MindRecord files." \
                               "When creating new Mindrecord files, please remove `commit` before `write_raw_data`." \
                               "In other cases, when appending to existing MindRecord files, " \
                               "please call `open_for_append` first and then `write_raw_data`.")

View File

@@ -283,7 +283,8 @@ set(ut_objects ${CORE_OBJECT_LIST} $<TARGET_OBJECTS:_ut_ut_obj> $<TARGET_OBJECTS
               $<TARGET_OBJECTS:_mindspore_common_obj>)
if(ENABLE_MINDDATA)
-    set(ut_objects ${ut_objects} ${dataengine_submodules} $<TARGET_OBJECTS:mindrecord_obj>)
+    set(ut_objects ${ut_objects} ${dataengine_submodules} $<TARGET_OBJECTS:mindrecord_obj>
+        $<TARGET_OBJECTS:md_log_adapter_obj>)
endif()
add_executable(ut_tests ${ut_objects})

View File

@@ -184,7 +184,7 @@ class TestAutotuneWithProfiler:
        with pytest.raises(RuntimeError) as excinfo:
            md_profiler.init()
-        assert "Unexpected error. Stop MD Autotune before initializing the MD Profiler." in str(excinfo.value)
+        assert "Stop MD Autotune before initializing the MD Profiler." in str(excinfo.value)

    @staticmethod
    def test_autotune_simple_pipeline():
View File

@@ -648,7 +648,7 @@ def test_cache_map_failure11():
    num_iter = 0
    for _ in ds1.create_dict_iterator(num_epochs=1):
        num_iter += 1
-    assert "Unexpected error. Server is not set up with spill support" in str(
+    assert "Server is not set up with spill support" in str(
        e.value)
    assert num_iter == 0

View File

@@ -131,7 +131,7 @@ def test_ag_news_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.AGNewsDataset(FILE_DIR, usage='test', shuffle=False)
@@ -140,7 +140,7 @@ def test_ag_news_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.AGNewsDataset(FILE_DIR, usage='test', shuffle=False)
@@ -149,7 +149,7 @@ def test_ag_news_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


if __name__ == "__main__":

View File

@@ -165,7 +165,7 @@ def test_amazon_review_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.AmazonReviewDataset(FULL_DIR, usage='test', shuffle=False)
@@ -174,7 +174,7 @@ def test_amazon_review_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.AmazonReviewDataset(FULL_DIR, usage='test', shuffle=False)
@@ -183,7 +183,7 @@ def test_amazon_review_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


def test_amazon_review_dataset_pipeline():

View File

@@ -158,7 +158,7 @@ def test_celeba_dataset_exception_file_path():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.CelebADataset(DATA_DIR, shuffle=False)
@@ -168,7 +168,7 @@ def test_celeba_dataset_exception_file_path():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.CelebADataset(DATA_DIR, shuffle=False)
@@ -177,7 +177,7 @@ def test_celeba_dataset_exception_file_path():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


def test_celeba_sampler_exception():

View File

@@ -487,7 +487,7 @@ def test_cifar_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.Cifar10Dataset(DATA_DIR_10)
@@ -497,7 +497,7 @@ def test_cifar_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.Cifar100Dataset(DATA_DIR_100)
@@ -507,7 +507,7 @@ def test_cifar_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.Cifar100Dataset(DATA_DIR_100)
@@ -517,7 +517,7 @@ def test_cifar_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.Cifar100Dataset(DATA_DIR_100)
@@ -527,7 +527,7 @@ def test_cifar_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


def test_cifar10_pk_sampler_get_dataset_size():

View File

@@ -236,7 +236,7 @@ def test_cityscapes_exception():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files:" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is:" in str(e)

    try:
        data = ds.CityscapesDataset(DATASET_DIR, usage=usage, quality_mode=quality_mode, task=task)
@@ -246,7 +246,7 @@ def test_cityscapes_exception():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files:" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is:" in str(e)


def test_cityscapes_param():

View File

@@ -409,7 +409,7 @@ def test_clue_exception_file_path():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.CLUEDataset(train_file, task='AFQMC', usage='train')
@@ -418,7 +418,7 @@ def test_clue_exception_file_path():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.CLUEDataset(train_file, task='AFQMC', usage='train')
@@ -427,7 +427,7 @@ def test_clue_exception_file_path():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


if __name__ == "__main__":

View File

@@ -147,7 +147,7 @@ def test_cmu_arctic_exception():
    def exception_func(item):
        raise Exception("Error occur!")

-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.CMUArcticDataset(DATA_DIR)
        data = data.map(operations=exception_func, input_columns=["waveform"], num_parallel_workers=1)

View File

@@ -429,7 +429,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection")
@@ -439,7 +439,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection")
@@ -448,7 +448,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection")
@@ -457,7 +457,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff")
@@ -466,7 +466,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff")
@@ -476,7 +476,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff")
@@ -485,7 +485,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff")
@@ -494,7 +494,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint")
@@ -503,7 +503,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint")
@@ -513,7 +513,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint")
@@ -522,7 +522,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint")
@@ -531,7 +531,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic")
@@ -540,7 +540,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic")
@@ -550,7 +550,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic")
@@ -559,7 +559,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic")
@@ -568,7 +568,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic")
@@ -577,7 +577,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=CAPTIONS_FILE, task="Captioning")
@@ -586,7 +586,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR, annotation_file=CAPTIONS_FILE, task="Captioning")
@@ -595,7 +595,7 @@ def test_coco_case_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


if __name__ == '__main__':

View File

@@ -324,7 +324,7 @@ def test_conll2000_dataset_exceptions():
        data = data.map(operations=exception_func, input_columns=["word"], num_parallel_workers=1)
        for _ in data.__iter__():
            pass
-    assert "map operation: [PyFunc] failed. The corresponding data files" in str(error_info.value)
+    assert "map operation: [PyFunc] failed. The corresponding data file is" in str(error_info.value)


if __name__ == "__main__":

View File

@@ -325,7 +325,7 @@ def test_csv_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.CSVDataset(
@@ -338,7 +338,7 @@ def test_csv_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.CSVDataset(
@@ -351,7 +351,7 @@ def test_csv_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.CSVDataset(
@@ -364,7 +364,7 @@ def test_csv_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


def test_csv_dataset_duplicate_columns():

View File

@@ -114,7 +114,7 @@ def test_dbpedia_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.DBpediaDataset(DATA_DIR, usage="test", shuffle=False)
        data = data.map(operations=exception_func, input_columns=["content"], num_parallel_workers=1)
@@ -122,7 +122,7 @@ def test_dbpedia_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


if __name__ == "__main__":

View File

@@ -232,7 +232,7 @@ def test_div2k_exception():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files:" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is:" in str(e)

    try:
        data = ds.DIV2KDataset(DATASET_DIR, usage=usage, downgrade=downgrade, scale=scale)
@@ -242,7 +242,7 @@ def test_div2k_exception():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files:" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is:" in str(e)


if __name__ == "__main__":

View File

@@ -359,7 +359,7 @@ def test_emnist_exception():
    def exception_func(item):
        raise Exception("Error occur!")

-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.EMnistDataset(DATA_DIR, "mnist", "train")
        data = data.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)

View File

@@ -279,7 +279,7 @@ def test_enwik9_dataset_exceptions():
        data = data.map(operations=exception_func, input_columns=["text"], num_parallel_workers=1)
        for _ in data.__iter__():
            pass
-    assert "map operation: [PyFunc] failed. The corresponding data files" in str(error_info.value)
+    assert "map operation: [PyFunc] failed. The corresponding data file is" in str(error_info.value)


if __name__ == "__main__":

View File

@@ -228,7 +228,7 @@ def test_fashion_mnist_exception():
    def exception_func(item):
        raise Exception("Error occur!")

-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.FashionMnistDataset(DATA_DIR)
        data = data.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)

View File

@@ -153,7 +153,7 @@ def test_flickr30k_dataset_exception():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.FlickrDataset(FLICKR30K_DATASET_DIR, FLICKR30K_ANNOTATION_FILE_1, decode=True)
@@ -163,7 +163,7 @@ def test_flickr30k_dataset_exception():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


if __name__ == "__main__":

View File

@@ -150,7 +150,7 @@ def test_gtzan_exception():
    def exception_func(item):
        raise Exception("Error occur!")

-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.GTZANDataset(DATA_DIR)

View File

@@ -876,7 +876,7 @@ def test_imagefolder_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.ImageFolderDataset(DATA_DIR)
@@ -887,7 +887,7 @@ def test_imagefolder_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.ImageFolderDataset(DATA_DIR)
@@ -897,7 +897,7 @@ def test_imagefolder_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    data_dir_invalid = "../data/dataset/testPK"
    try:

View File

@@ -686,7 +686,7 @@ def test_imdb_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.IMDBDataset(DATA_DIR)
@@ -697,7 +697,7 @@ def test_imdb_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    data_dir_invalid = "../data/dataset/IMDBDATASET"
    try:

View File

@@ -119,7 +119,7 @@ def test_iwslt2016_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.IWSLT2016Dataset(DATA_IWSLT2016_DIR, usage='train', language_pair=["de", "en"], shuffle=False)
@@ -128,7 +128,7 @@ def test_iwslt2016_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


def test_iwslt2017_dataset_basic():
@@ -231,7 +231,7 @@ def test_iwslt2017_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.IWSLT2017Dataset(DATA_IWSLT2017_DIR, usage='train', language_pair=["de", "en"], shuffle=False)
@@ -240,7 +240,7 @@ def test_iwslt2017_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


if __name__ == "__main__":

View File

@@ -236,7 +236,7 @@ def test_kmnist_exception():
    def exception_func(item):
        raise Exception("Error occur!")

-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.KMnistDataset(DATA_DIR)
        data = data.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)

View File

@@ -149,7 +149,7 @@ def test_libri_tts_exception():
    def exception_func(item):
        raise Exception("Error occur!")

-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.LibriTTSDataset(DATA_DIR)
        data = data.map(operations=exception_func, input_columns=["waveform"], num_parallel_workers=1)

View File

@@ -124,7 +124,7 @@ def test_lj_speech_exception():
    def exception_func(item):
        raise Exception("Error occur!")

-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.LJSpeechDataset(DATA_DIR)
        data = data.map(operations=exception_func, input_columns=["waveform"], num_parallel_workers=1)

View File

@@ -561,7 +561,7 @@ def test_lsun_exception_map():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.LSUNDataset(DATA_DIR)
@@ -573,7 +573,7 @@ def test_lsun_exception_map():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.LSUNDataset(DATA_DIR)
@@ -583,7 +583,7 @@ def test_lsun_exception_map():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


if __name__ == '__main__':

View File

@@ -179,7 +179,7 @@ def test_manifest_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.ManifestDataset(DATA_FILE)
@@ -189,7 +189,7 @@ def test_manifest_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.ManifestDataset(DATA_FILE)
@@ -198,7 +198,7 @@ def test_manifest_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    NO_SOURCE_DATA_FILE = "../data/dataset/testManifestData/invalidNoSource.manifest"
    try:

View File

@@ -217,7 +217,7 @@ def test_mnist_exception():
    def exception_func(item):
        raise Exception("Error occur!")

-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.MnistDataset(DATA_DIR)
        data = data.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)

View File

@@ -453,7 +453,7 @@ def test_omniglot_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(
            e)

    try:
@@ -466,7 +466,7 @@ def test_omniglot_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)

    try:
        data = ds.OmniglotDataset(DATA_DIR)
@@ -476,7 +476,7 @@ def test_omniglot_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)


if __name__ == '__main__':

View File

@@ -361,7 +361,7 @@ def test_penn_treebank_dataset_exceptions():
        data = data.map(operations=exception_func, input_columns=["text"], num_parallel_workers=1)
        for _ in data.__iter__():
            pass
-    assert "map operation: [PyFunc] failed. The corresponding data files" in str(error_info.value)
+    assert "map operation: [PyFunc] failed. The corresponding data file is" in str(error_info.value)
if __name__ == "__main__":

View File

@@ -273,7 +273,7 @@ def test_qmnist_exception():
    def exception_func(item):
        raise Exception("Error occur!")
-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.QMnistDataset(DATA_DIR, "train", True)
        data = data.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)

View File

@@ -232,7 +232,7 @@ def test_sbu_exception():
    def exception_func(item):
        raise Exception("Error occur!")
-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        dataset = ds.SBUDataset(DATA_DIR, decode=True)
        dataset = dataset.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)

View File

@@ -207,7 +207,7 @@ def test_semeion_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.SemeionDataset(DATA_DIR_SEMEION)
@@ -217,7 +217,7 @@ def test_semeion_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
def test_semeion_pipeline():

View File

@@ -153,7 +153,7 @@ def test_sogou_news_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.SogouNewsDataset(DATA_SOGOU_NEWS_DIR, usage='test', shuffle=False)
@@ -162,7 +162,7 @@ def test_sogou_news_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.SogouNewsDataset(DATA_SOGOU_NEWS_DIR, usage='test', shuffle=False)
@@ -171,7 +171,7 @@ def test_sogou_news_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
if __name__ == "__main__":

View File

@@ -124,7 +124,7 @@ def test_speech_commands_exception():
    def exception_func(item):
        raise Exception("Error occur!")
-    error_msg_8 = "The corresponding data files."
+    error_msg_8 = "The corresponding data file is."
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.SpeechCommandsDataset(DATA_DIR)
        data = data.map(operations=exception_func, input_columns=["waveform"], num_parallel_workers=1)

View File

@@ -205,7 +205,7 @@ def test_squad_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" \
+        assert "map operation: [PyFunc] failed. The corresponding data file is" \
            in str(e)
    try:
@@ -216,7 +216,7 @@ def test_squad_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" \
+        assert "map operation: [PyFunc] failed. The corresponding data file is" \
            in str(e)
    try:
@@ -227,7 +227,7 @@ def test_squad_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" \
+        assert "map operation: [PyFunc] failed. The corresponding data file is" \
            in str(e)
    try:
@@ -238,7 +238,7 @@ def test_squad_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" \
+        assert "map operation: [PyFunc] failed. The corresponding data file is" \
            in str(e)

View File

@@ -305,7 +305,7 @@ def test_stl10_exception():
    def exception_func(item):
        raise Exception("Error occur!")
-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        all_data = ds.STL10Dataset(DATA_DIR, "all")
        all_data = all_data.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)

View File

@@ -195,7 +195,7 @@ def test_tedlium_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.TedliumDataset(DATA_DIR_TEDLIUM_RELEASE12, RELEASE1)
@@ -205,7 +205,7 @@ def test_tedlium_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.TedliumDataset(DATA_DIR_TEDLIUM_RELEASE12, RELEASE2)
@@ -215,7 +215,7 @@ def test_tedlium_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.TedliumDataset(DATA_DIR_TEDLIUM_RELEASE12, RELEASE2)
@@ -225,7 +225,7 @@ def test_tedlium_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.TedliumDataset(DATA_DIR_TEDLIUM_RELEASE3, RELEASE3)
@@ -235,7 +235,7 @@ def test_tedlium_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.TedliumDataset(DATA_DIR_TEDLIUM_RELEASE3, RELEASE3)
@@ -245,7 +245,7 @@ def test_tedlium_exception_file_path():
            num_rows += 1
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
def test_tedlium_extensions():

View File

@@ -327,7 +327,7 @@ def test_textline_dataset_exceptions():
        data = data.map(operations=exception_func, input_columns=["text"], num_parallel_workers=1)
        for _ in data.__iter__():
            pass
-    assert "map operation: [PyFunc] failed. The corresponding data files" in str(error_info.value)
+    assert "map operation: [PyFunc] failed. The corresponding data file is" in str(error_info.value)
if __name__ == "__main__":

View File

@@ -437,7 +437,7 @@ def test_tfrecord_exception():
        data = data.map(operations=exception_func, input_columns=["col_1d"], num_parallel_workers=1)
        for _ in data.__iter__():
            pass
-    assert "map operation: [PyFunc] failed. The corresponding data files" in str(info.value)
+    assert "map operation: [PyFunc] failed. The corresponding data file is" in str(info.value)
    with pytest.raises(RuntimeError) as info:
        schema = ds.Schema()
@@ -448,7 +448,7 @@ def test_tfrecord_exception():
        data = data.map(operations=exception_func, input_columns=["col_2d"], num_parallel_workers=1)
        for _ in data.__iter__():
            pass
-    assert "map operation: [PyFunc] failed. The corresponding data files" in str(info.value)
+    assert "map operation: [PyFunc] failed. The corresponding data file is" in str(info.value)
    with pytest.raises(RuntimeError) as info:
        schema = ds.Schema()
@@ -459,7 +459,7 @@ def test_tfrecord_exception():
        data = data.map(operations=exception_func, input_columns=["col_3d"], num_parallel_workers=1)
        for _ in data.__iter__():
            pass
-    assert "map operation: [PyFunc] failed. The corresponding data files" in str(info.value)
+    assert "map operation: [PyFunc] failed. The corresponding data file is" in str(info.value)
if __name__ == '__main__':

View File

@@ -311,7 +311,7 @@ def test_udpos_dataset_exceptions():
        data = data.map(operations=exception_func, input_columns=["word"], num_parallel_workers=1)
        for _ in data.__iter__():
            pass
-    assert "map operation: [PyFunc] failed. The corresponding data files" in str(error_info.value)
+    assert "map operation: [PyFunc] failed. The corresponding data file is" in str(error_info.value)
if __name__ == "__main__":

View File

@@ -296,7 +296,7 @@ def test_voc_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.VOCDataset(DATA_DIR, task="Detection", usage="train", shuffle=False)
@@ -306,7 +306,7 @@ def test_voc_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.VOCDataset(DATA_DIR, task="Detection", usage="train", shuffle=False)
@@ -315,7 +315,7 @@ def test_voc_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.VOCDataset(DATA_DIR, task="Detection", usage="train", shuffle=False)
@@ -324,7 +324,7 @@ def test_voc_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.VOCDataset(DATA_DIR, task="Detection", usage="train", shuffle=False)
@@ -333,7 +333,7 @@ def test_voc_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.VOCDataset(DATA_DIR, task="Segmentation", usage="train", shuffle=False)
@@ -342,7 +342,7 @@ def test_voc_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.VOCDataset(DATA_DIR, task="Segmentation", usage="train", shuffle=False)
@@ -352,7 +352,7 @@ def test_voc_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.VOCDataset(DATA_DIR, task="Segmentation", usage="train", shuffle=False)
@@ -361,7 +361,7 @@ def test_voc_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.VOCDataset(DATA_DIR, task="Segmentation", usage="train", shuffle=False)
@@ -371,7 +371,7 @@ def test_voc_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
def test_voc_num_classes():

View File

@@ -205,7 +205,7 @@ def test_wider_face_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    # usage = all
    try:
@@ -215,7 +215,7 @@ def test_wider_face_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.WIDERFaceDataset(DATA_DIR, shuffle=False)
@@ -224,7 +224,7 @@ def test_wider_face_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.WIDERFaceDataset(DATA_DIR, shuffle=False)
@@ -233,7 +233,7 @@ def test_wider_face_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.WIDERFaceDataset(DATA_DIR, shuffle=False)
@@ -242,7 +242,7 @@ def test_wider_face_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.WIDERFaceDataset(DATA_DIR, shuffle=False)
@@ -251,7 +251,7 @@ def test_wider_face_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.WIDERFaceDataset(DATA_DIR, shuffle=False)
@@ -260,7 +260,7 @@ def test_wider_face_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.WIDERFaceDataset(DATA_DIR, shuffle=False)
@@ -269,7 +269,7 @@ def test_wider_face_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.WIDERFaceDataset(DATA_DIR, shuffle=False)
@@ -278,7 +278,7 @@ def test_wider_face_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
if __name__ == '__main__':

View File

@@ -370,7 +370,7 @@ def test_wiki_text_dataset_exceptions():
        data = data.map(operations=exception_func, input_columns=["text"], num_parallel_workers=1)
        for _ in data.__iter__():
            pass
-    assert "map operation: [PyFunc] failed. The corresponding data files" in str(error_info.value)
+    assert "map operation: [PyFunc] failed. The corresponding data file is" in str(error_info.value)
if __name__ == "__main__":

View File

@@ -122,7 +122,7 @@ def test_yahoo_answers_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.YahooAnswersDataset(DATA_DIR, usage="test", shuffle=False)
        data = data.map(operations=exception_func, input_columns=["content"], num_parallel_workers=1)
@@ -130,7 +130,7 @@ def test_yahoo_answers_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
if __name__ == "__main__":

View File

@@ -129,7 +129,7 @@ def test_yelp_review_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
    try:
        data = ds.YelpReviewDataset(DATA_POLARITY_DIR, usage='test', shuffle=False)
@@ -138,7 +138,7 @@ def test_yelp_review_dataset_exception():
            pass
        assert False
    except RuntimeError as e:
-        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
+        assert "map operation: [PyFunc] failed. The corresponding data file is" in str(e)
if __name__ == "__main__":

View File

@@ -143,7 +143,7 @@ def test_yes_no_exception():
    def exception_func(item):
        raise Exception("Error occur!")
-    error_msg_8 = "The corresponding data files"
+    error_msg_8 = "The corresponding data file is"
    with pytest.raises(RuntimeError, match=error_msg_8):
        data = ds.YesNoDataset(DATA_DIR)
        data = data.map(operations=exception_func, input_columns=[

View File

@@ -540,7 +540,7 @@ def test_eager_invalid_image_cutout():
        assert error_msg in str(error_info.value)
    my_input = np.random.randn(60, 50)
-    test_config(my_input, RuntimeError, "Unexpected error. CutOut: shape is invalid.")
+    test_config(my_input, RuntimeError, "CutOut: shape is invalid.")
    test_config(1, TypeError, "Input should be NumPy or PIL image, got <class 'int'>.")
    test_config(1.0, TypeError, "Input should be NumPy or PIL image, got <class 'float'>.")

View File

@@ -98,7 +98,7 @@ def test_five_crop_error_msg():
        for _ in data:
            pass
    error_msg = \
-        "Unexpected error. map operation: [ToTensor] failed. The op is OneToOne, can only accept one tensor as input."
+        "map operation: [ToTensor] failed. The op is OneToOne, can only accept one tensor as input."
    assert error_msg in str(info.value)
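For context on the OneToOne wording: FiveCrop emits five crops per input image while ToTensor accepts exactly one tensor, so chaining them in a single map() triggers the asserted failure. A hedged sketch of that combination, assuming the unified vision API rather than quoting this test's own setup:

import mindspore.dataset.vision as vision

# Mapping this list over an image column fails at iteration time with
# "map operation: [ToTensor] failed. The op is OneToOne, ..." because
# FiveCrop's five outputs cannot feed ToTensor's single input.
transforms = [vision.FiveCrop(size=200), vision.ToTensor()]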

View File

@@ -0,0 +1,239 @@
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
import mindspore.dataset as ds
import mindspore.dataset.vision as vision
def test_generator_single_worker_exception():
"""
Feature: Formatted exception.
Description: Test formatted exception in GeneratorDataset scenario with one worker.
Expectation: Python stack and summary message can be found in exception log.
"""
class Gen():
def __init__(self):
self.data = [1, 2, 3, 4]
def __getitem__(self, index):
data = self.data[index]
return data/0
def __len__(self):
return 4
dataset = ds.GeneratorDataset(Gen(), ["image"], shuffle=False, num_parallel_workers=1)
try:
for data in dataset.create_dict_iterator(output_numpy=True, num_epochs=1):
print(data["image"].shape)
assert False
except RuntimeError as e:
assert "Exception thrown from user defined Python function in dataset" in str(e)
assert "Python Call Stack" in str(e)
assert "Traceback (most recent call last):" in str(e)
assert "ZeroDivisionError: division by zero" in str(e)
assert "Dataset Pipeline Error Message:" in str(e)
def test_generator_multi_workers_exception():
"""
Feature: Formatted exception.
Description: Test formatted exception in GeneratorDataset scenario with multi-workers.
Expectation: Python stack and summary message can be found in exception log.
"""
def pyfunc(image):
return image
class Gen():
def __init__(self):
self.data = [[1], [2], [3], [4]]
def __getitem__(self, index):
image = Image.open(index)
return image
def __len__(self):
return 4
dataset = ds.GeneratorDataset(Gen(), ["image"], shuffle=False, num_parallel_workers=2)
dataset = dataset.map(operations=pyfunc, input_columns=["image"])
try:
for data in dataset.create_dict_iterator(output_numpy=True, num_epochs=1):
print(data["image"].shape)
assert False
except RuntimeError as e:
assert "Exception thrown from user defined Python function in dataset" in str(e)
assert "Python Call Stack" in str(e)
assert "Traceback (most recent call last):" in str(e)
assert "NameError: name 'Image' is not defined" in str(e)
assert "Dataset Pipeline Error Message:" in str(e)
def test_batch_operator_exception():
"""
Feature: Formatted exception.
Description: Test formatted exception in batch operator scenario.
Expectation: Python stack and summary message can be found in exception log.
"""
class Gen():
def __init__(self):
self.data = [np.ones((2)), np.ones((2)), np.ones((2)), np.ones((2, 3))]
def __getitem__(self, index):
return self.data[index]
def __len__(self):
return 4
dataset = ds.GeneratorDataset(Gen(), ["image"], shuffle=False)
dataset = dataset.batch(2)
try:
for data in dataset.create_dict_iterator(output_numpy=True, num_epochs=1):
print(data["image"].shape)
assert False
except RuntimeError as e:
assert "Exception thrown from dataset pipeline. Refer to 'Dataset Pipeline Error Message'" in str(e)
assert "Python Call Stack" not in str(e)
assert "C++ Call Stack: (For framework developers)" in str(e)
def test_batch_operator_with_pyfunc_exception():
"""
Feature: Formatted exception.
Description: Test formatted exception in batch operator with pyfunc scenario.
Expectation: Python stack and summary message can be found in exception log.
"""
class Gen():
def __init__(self):
self.data = [np.ones((2)), np.ones((2)), np.ones((2)), np.ones((2))]
def __getitem__(self, index):
return self.data[index]
def __len__(self):
return 4
def batch_func(col, batch_info):
zero = 0
fake_data = 1/zero
return np.ones((3)), np.array(fake_data)
dataset = ds.GeneratorDataset(Gen(), ["image"], shuffle=False)
dataset = dataset.batch(2, per_batch_map=batch_func, input_columns=["image"])
try:
for data in dataset.create_dict_iterator(output_numpy=True, num_epochs=1):
print(data["image"].shape)
assert False
except RuntimeError as e:
assert "Exception thrown from user defined Python function in dataset" in str(e)
assert "Python Call Stack" in str(e)
assert "Traceback (most recent call last):" in str(e)
assert "in batch_func" in str(e)
assert "Dataset Pipeline Error Message:" in str(e)
def test_map_operator_with_c_ops_and_multiprocessing_exception():
"""
Feature: Formatted exception.
Description: Test formatted exception in map operator with c ops scenario.
Expectation: Python stack and summary message can be found in exception log.
"""
class Gen():
def __init__(self):
self.data = [np.ones((10, 10, 3)),
np.ones((15, 15, 3)),
np.ones((5, 5, 3))]
def __getitem__(self, index):
return self.data[index]
def __len__(self):
return 3
dataset = ds.GeneratorDataset(Gen(), ["image"], shuffle=False, num_parallel_workers=2)
dataset = dataset.map(operations=vision.RandomCrop((8, 8)), input_columns=["image"], num_parallel_workers=2)
try:
for data in dataset.create_dict_iterator(output_numpy=True, num_epochs=1):
print(data["image"].shape)
assert False
except RuntimeError as e:
assert "Shape is incorrect" in str(e)
assert "Python Call Stack" not in str(e)
assert "Dataset Pipeline Error Message:" in str(e)
def test_map_operator_with_pyfunc_and_multithreading_exception():
"""
Feature: Formatted exception.
Description: Test formatted exception in map operator with pyfunc scenario.
Expectation: Python stack and summary message can be found in exception log.
"""
def pyfunc(image):
a = 1
b = 0
c = a/b
return c
class Gen():
def __init__(self):
self.data = [[1], [2], [3], [4]]
def __getitem__(self, index):
return self.data[index]
def __len__(self):
return 4
dataset = ds.GeneratorDataset(Gen(), ["image"], shuffle=False, num_parallel_workers=2)
dataset = dataset.map(operations=pyfunc, input_columns=["image"], num_parallel_workers=2)
try:
for data in dataset.create_dict_iterator(output_numpy=True, num_epochs=1):
print(data["image"].shape)
assert False
except RuntimeError as e:
assert "Exception thrown from user defined Python function in dataset" in str(e)
assert "Python Call Stack" in str(e)
assert "Traceback (most recent call last):" in str(e)
assert "Dataset Pipeline Error Message:" in str(e)
def test_map_operator_with_pyfunc_and_multiprocessing_exception():
"""
Feature: Formatted exception.
Description: Test formatted exception in map operator with pyfunc scenario.
Expectation: Python stack and summary message can be found in exception log.
"""
def pyfunc(image):
a = 1
b = 0
c = a/b
return c
class Gen():
def __init__(self):
self.data = [[1], [2], [3], [4]]
def __getitem__(self, index):
return self.data[index]
def __len__(self):
return 4
dataset = ds.GeneratorDataset(Gen(), ["image"], shuffle=False, num_parallel_workers=1)
dataset = dataset.map(operations=pyfunc, input_columns=["image"], num_parallel_workers=2,
python_multiprocessing=True)
try:
for data in dataset.create_dict_iterator(output_numpy=True, num_epochs=1):
print(data["image"].shape)
assert False
except RuntimeError as e:
assert "Exception thrown from user defined Python function in dataset" in str(e)
assert "Python Call Stack" in str(e)
assert "Traceback (most recent call last):" in str(e)
assert "in pyfunc" in str(e)
assert "Dataset Pipeline Error Message:" in str(e)

View File

@@ -154,7 +154,7 @@ def test_griffin_lim_pipeline_invalid_param_constraint():
    data1 = ds.NumpySlicesDataset(in_data, column_names=["multi_dimensional_data"], shuffle=False)
    with pytest.raises(RuntimeError,
-                       match=r"Unexpected error. map operation: \[GriffinLim\] failed. " +
+                       match=r"map operation: \[GriffinLim\] failed. " +
                             r"GriffinLim: the frequency of the input should equal to n_fft / 2 \+ 1"):
        transforms = [c_audio.GriffinLim(n_fft=100)]
        data1 = data1.map(operations=transforms, input_columns=["multi_dimensional_data"])
@@ -162,7 +162,7 @@ def test_griffin_lim_pipeline_invalid_param_constraint():
            _ = item["multi_dimensional_data"]
    with pytest.raises(RuntimeError,
-                       match=r"Unexpected error. map operation: \[GriffinLim\] failed. " +
+                       match=r"map operation: \[GriffinLim\] failed. " +
                             r"GriffinLim: the frequency of the input should equal to n_fft / 2 \+ 1"):
        transforms = [c_audio.GriffinLim(n_fft=300, n_iter=10, win_length=0, hop_length=120)]
        data1 = data1.map(operations=transforms, input_columns=["multi_dimensional_data"])
@@ -170,7 +170,7 @@ def test_griffin_lim_pipeline_invalid_param_constraint():
            _ = item["multi_dimensional_data"]
    with pytest.raises(RuntimeError,
-                       match=r"Syntax error. GriffinLim: momentum equal to or greater than 1 can be unstable, " +
+                       match=r"GriffinLim: momentum equal to or greater than 1 can be unstable, " +
                             "but got: 1.000000"):
        transforms = [c_audio.GriffinLim(n_fft=300, n_iter=10, win_length=0, hop_length=0, power=2, momentum=1)]
        data1 = data1.map(operations=transforms, input_columns=["multi_dimensional_data"])

View File

@@ -115,7 +115,6 @@ def test_melscale_fbanks_invalid_input():
        logger.info("Test melscale_fbanks with bad input: {0}".format(test_name))
        with pytest.raises(error) as error_info:
            audio.melscale_fbanks(n_freqs, f_min, f_max, n_mels, sample_rate, norm, mel_type)
-        print(error_info)
        assert error_msg in str(error_info.value)
    test_invalid_input("invalid n_freqs parameter Value", 99999999999, 0, 50, 5, 100, audio.NormType.NONE,

View File

@@ -113,7 +113,7 @@ def test_invalid_mindrecord():
        f.write('just for test')
    columns_list = ["data", "file_name", "label"]
    num_readers = 4
-    with pytest.raises(RuntimeError, match="Unexpected error. Invalid file, the size of mindrecord file header "
+    with pytest.raises(RuntimeError, match="Invalid file, the size of mindrecord file header "
                                            "is larger than the upper limit."):
        data_set = ds.MindDataset(file_name, columns_list, num_readers)
        for _ in data_set.create_dict_iterator(num_epochs=1, output_numpy=True):
@@ -375,21 +375,21 @@ def test_mindrecord_exception():
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    create_cv_mindrecord(file_name, 1)
    columns_list = ["data", "file_name", "label"]
-    with pytest.raises(RuntimeError, match="The corresponding data files"):
+    with pytest.raises(RuntimeError, match="The corresponding data file is"):
        data_set = ds.MindDataset(file_name, columns_list, shuffle=False)
        data_set = data_set.map(operations=exception_func, input_columns=["data"],
                                num_parallel_workers=1)
        num_iter = 0
        for _ in data_set.create_dict_iterator(num_epochs=1, output_numpy=True):
            num_iter += 1
-    with pytest.raises(RuntimeError, match="The corresponding data files"):
+    with pytest.raises(RuntimeError, match="The corresponding data file is"):
        data_set = ds.MindDataset(file_name, columns_list, shuffle=False)
        data_set = data_set.map(operations=exception_func, input_columns=["file_name"],
                                num_parallel_workers=1)
        num_iter = 0
        for _ in data_set.create_dict_iterator(num_epochs=1, output_numpy=True):
            num_iter += 1
-    with pytest.raises(RuntimeError, match="The corresponding data files"):
+    with pytest.raises(RuntimeError, match="The corresponding data file is"):
        data_set = ds.MindDataset(file_name, columns_list, shuffle=False)
        data_set = data_set.map(operations=exception_func, input_columns=["label"],
                                num_parallel_workers=1)

View File

@@ -252,7 +252,7 @@ class TestMindDataProfilingStartStop:
        # Reissue Start MindData Profiling
        self.md_profiler.start()
-        assert "MD ProfilingManager is already running." in str(info)
+        assert "MD ProfilingManager is already running." in str(info.value)
        # Stop MindData Profiling
        self.md_profiler.stop()
@@ -290,7 +290,7 @@ class TestMindDataProfilingStartStop:
        # Stop MindData Profiling - without prior Start()
        self.md_profiler.stop()
-        assert "MD ProfilingManager has not started yet." in str(info)
+        assert "MD ProfilingManager has not started yet." in str(info.value)
        # Start MindData Profiling
        self.md_profiler.start()
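Both profiling hunks fix a pytest usage detail rather than a log string: pytest.raises(...) as info yields an ExceptionInfo object, and str(info) is not guaranteed to contain the exception message, while str(info.value) stringifies the caught exception itself. A standalone sketch, not from the patch:

import pytest

with pytest.raises(RuntimeError) as info:
    raise RuntimeError("MD ProfilingManager is already running.")

# Assert against the exception's own message, not ExceptionInfo's
# string form, which may only show type and location details.
assert "already running" in str(info.value)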

View File

@@ -1028,19 +1028,19 @@ def test_serdes_not_implemented_op_exception():
                     vision.Perspective(start_points=[[0, 63], [63, 63], [63, 0], [0, 0]],
                                        end_points=[[0, 63], [63, 63], [63, 0], [0, 0]],
                                        interpolation=Inter.BILINEAR)])
-    assert "Unexpected error. Invalid data, unsupported operation: Perspective" in str(error_info.value)
+    assert "Invalid data, unsupported operation: Perspective" in str(error_info.value)
    # Proper to_json and from_json support has not yet been added for AdjustBrightness op
    with pytest.raises(RuntimeError) as error_info:
        test_config([vision.Decode(),
                     vision.AdjustBrightness(brightness_factor=2.0)])
-    assert "Unexpected error. Invalid data, unsupported operation: AdjustBrightness" in str(error_info.value)
+    assert "Invalid data, unsupported operation: AdjustBrightness" in str(error_info.value)
    # Proper to_json and from_json support has not yet been added for AdjustContrast op
    with pytest.raises(RuntimeError) as error_info:
        test_config([vision.Decode(),
                     vision.AdjustContrast(contrast_factor=2.0)])
-    assert "Unexpected error. Invalid data, unsupported operation: AdjustContrast" in str(error_info.value)
+    assert "Invalid data, unsupported operation: AdjustContrast" in str(error_info.value)
    # Restore configuration
    ds.config.set_seed(original_seed)

View File

@@ -413,7 +413,7 @@ def test_spectrogram_param():
        _ = audio.Spectrogram(n_fft=100, center=False)(wav)
    except RuntimeError as error:
        logger.info("Got an exception in Spectrogram: {}".format(str(error)))
-        assert "Unexpected error. Spectrogram: n_fft should be more than 0 and less than 30," \
+        assert "Spectrogram: n_fft should be more than 0 and less than 30," \
               " but got n_fft: 100." in str(error)

View File

@@ -191,7 +191,7 @@ def test_ten_crop_wrong_img_error_msg():
    with pytest.raises(RuntimeError) as info:
        data.create_tuple_iterator(num_epochs=1).__next__()
    error_msg = \
-        "Unexpected error. map operation: [ToTensor] failed. The op is OneToOne, can only accept one tensor as input."
+        "map operation: [ToTensor] failed. The op is OneToOne, can only accept one tensor as input."
    assert error_msg in str(info.value)

View File

@@ -124,7 +124,7 @@ def test_minddtaset_generatordataset_exception_01(add_and_remove_cv_file):
        for _ in range(num_epochs):
            for _ in iter_:
                num_iter += 1
-    assert 'Unexpected error. Invalid data, column name:' in str(error_info.value)
+    assert 'Invalid data, column name:' in str(error_info.value)
# pylint: disable=redefined-outer-name
@@ -187,7 +187,7 @@ def test_minddtaset_generatordataset_exception_02(add_and_remove_file):
            for item in iter_:
                print("item: ", item)
                num_iter += 1
-    assert 'Unexpected error. Invalid data, column name:' in str(error_info.value)
+    assert 'Invalid data, column name:' in str(error_info.value)
def test_two_level_pipeline_with_multiprocessing():

View File

@@ -386,7 +386,7 @@ def test_cpp_uniform_augment_random_crop_badinput(num_ops=1):
    with pytest.raises(RuntimeError) as error_info:
        for _ in ds1.create_dict_iterator(num_epochs=1, output_numpy=True):
            num_batches += 1
-    assert "Shape is incorrect. map operation: [UniformAugment] failed." in str(error_info)
+    assert "map operation: [UniformAugment] failed." in str(error_info.value)
if __name__ == "__main__":

View File

@@ -195,7 +195,7 @@ def test_vectors_invalid_input():
                       error=RuntimeError, error_msg="invalid file, file is empty.")
    test_invalid_input("the count of `unknown_init`'s element is different with word vector.",
                       DATASET_ROOT_PATH + "vectors.txt",
-                       error=RuntimeError, error_msg="Unexpected error. ToVectors: " +
+                       error=RuntimeError, error_msg="ToVectors: " +
                       "unk_init must be the same length as vectors, but got unk_init: 2 and vectors: 6",
                       unk_init=[-1, -1])
    test_invalid_input("The file not exist", DATASET_ROOT_PATH + "not_exist.txt", error=RuntimeError,

View File

@@ -561,7 +561,7 @@ def test_eager_invalid_image_cutout_c():
        assert error_msg in str(error_info.value)
    my_input = np.random.randn(60, 50)
-    test_config(my_input, RuntimeError, "Unexpected error. CutOut: shape is invalid.")
+    test_config(my_input, RuntimeError, "CutOut: shape is invalid.")
    test_config(1, TypeError, "Input should be NumPy or PIL image, got <class 'int'>.")
    test_config(1.0, TypeError, "Input should be NumPy or PIL image, got <class 'float'>.")

View File

@@ -266,8 +266,8 @@ def test_map_with_exact_log():
        for data in dataset.create_dict_iterator():
            print(data["data"], data["label"])
    print("-----{}++++".format(info.value), flush=True)
-    assert str(info.value).count("Exception thrown from PyFunc") == 1
-    assert str(info.value).count("Caught TypeError in map") == 1
+    assert str(info.value).count("Exception thrown from user defined Python function") == 1
+    assert str(info.value).count("map operation") == 1
    assert str(info.value).count("img should be PIL image") == 1

View File

@@ -261,7 +261,7 @@ def test_to_type_05():
        data = data.map(operations=transform, input_columns=["image"])
        for _ in enumerate(data):
            pass
-    assert "data type" in str(error_info.value)
+    assert "datatype" in str(error_info.value)
def test_to_type_invalid_arg():

View File

@@ -135,7 +135,7 @@ def test_cifar10_to_mindrecord_directory(fixture_file):
    when destination path is directory.
    """
    with pytest.raises(RuntimeError,
-                       match="Unexpected error. Invalid file, mindrecord files already exist. Please check file path:"):
+                       match="Invalid file, mindrecord files already exist. Please check file path:"):
        cifar10_transformer = Cifar10ToMR(CIFAR10_DIR, CIFAR10_DIR)
        cifar10_transformer.transform()
@@ -146,7 +146,7 @@ def test_cifar10_to_mindrecord_filename_equals_cifar10():
    when destination path equals source path.
    """
    with pytest.raises(RuntimeError,
-                       match="Unexpected error. Invalid file, mindrecord files already exist. Please check file path:"):
+                       match="Invalid file, mindrecord files already exist. Please check file path:"):
        cifar10_transformer = Cifar10ToMR(CIFAR10_DIR,
                                          CIFAR10_DIR + "/data_batch_0")
        cifar10_transformer.transform()

View File

@@ -1229,7 +1229,7 @@ def test_mindrecord_commit_exception_01():
        writer.commit()
        writer.write_raw_data(data[5:10])
-    assert 'Unexpected error. Not allow to call `write_raw_data` on flushed MindRecord files.' in str(err.value)
+    assert 'Not allowed to call `write_raw_data` on flushed MindRecord files.' in str(err.value)
    remove_multi_files(mindrecord_file_name, 4)
@@ -1249,7 +1249,7 @@ def test_cv_file_overwrite_exception_01():
                          "label": {"type": "int64"}, "data": {"type": "bytes"}}
        writer.add_schema(cv_schema_json, "img_schema")
        writer.write_raw_data(data)
-    assert 'Unexpected error. Invalid file, mindrecord files already exist. Please check file path:' in str(err.value)
+    assert 'Invalid file, mindrecord files already exist. Please check file path:' in str(err.value)
    remove_multi_files(mindrecord_file_name, FILES_NUM)
def test_cv_file_overwrite_exception_02():
@@ -1268,5 +1268,5 @@ def test_cv_file_overwrite_exception_02():
                          "label": {"type": "int64"}, "data": {"type": "bytes"}}
        writer.add_schema(cv_schema_json, "img_schema")
        writer.write_raw_data(data)
-    assert 'Unexpected error. Invalid file, mindrecord files already exist. Please check file path:' in str(err.value)
+    assert 'Invalid file, mindrecord files already exist. Please check file path:' in str(err.value)
    remove_multi_files(mindrecord_file_name, FILES_NUM)

View File

@@ -256,7 +256,7 @@ def test_invalid_db():
        f.write('just for test')
    with pytest.raises(RuntimeError) as err:
        FileReader(file_name)
-    assert "Unexpected error. Failed to execute the sql [ SELECT NAME from SHARD_NAME; ] " \
+    assert "Failed to execute the sql [ SELECT NAME from SHARD_NAME; ] " \
           "while verifying meta file" in str(err.value)
    remove_file(file_name)
@@ -271,7 +271,7 @@ def test_overwrite_invalid_mindrecord():
        f.write('just for test')
    with pytest.raises(RuntimeError) as err:
        create_cv_mindrecord(1, file_name)
-    assert 'Unexpected error. Invalid file, mindrecord files already exist. Please check file path:' in str(err.value)
+    assert 'Invalid file, mindrecord files already exist. Please check file path:' in str(err.value)
    remove_file(file_name)
def test_overwrite_invalid_db():
@@ -285,7 +285,7 @@ def test_overwrite_invalid_db():
        f.write('just for test')
    with pytest.raises(RuntimeError) as err:
        create_cv_mindrecord(1, file_name)
-    assert 'Unexpected error. Invalid file, mindrecord files already exist. Please check file path:' in str(err.value)
+    assert 'Invalid file, mindrecord files already exist. Please check file path:' in str(err.value)
    remove_file(file_name)
def test_read_after_close():
@@ -382,7 +382,7 @@ def test_mindpage_pageno_pagesize_not_int():
    with pytest.raises(ParamValueError):
        reader.read_at_page_by_name("822", 0, "qwer")
-    with pytest.raises(RuntimeError, match=r"Unexpected error. Invalid data, "
+    with pytest.raises(RuntimeError, match=r"Invalid data, "
                                           r"category_id: 99999 must be in the range \[0, 10\]."):
        reader.read_at_page_by_id(99999, 0, 1)
    remove_file(file_name)
@@ -407,11 +407,11 @@ def test_mindpage_filename_not_exist():
    info = reader.read_category_info()
    logger.info("category info: {}".format(info))
-    with pytest.raises(RuntimeError, match=r"Unexpected error. Invalid data, "
+    with pytest.raises(RuntimeError, match=r"Invalid data, "
                                           r"category_id: 9999 must be in the range \[0, 10\]."):
        reader.read_at_page_by_id(9999, 0, 1)
-    with pytest.raises(RuntimeError, match="Unexpected error. category_name: abc.jpg could not found."):
+    with pytest.raises(RuntimeError, match="category_name: abc.jpg could not found."):
        reader.read_at_page_by_name("abc.jpg", 0, 1)
    with pytest.raises(ParamValueError):
@@ -567,7 +567,7 @@ def test_write_with_invalid_data():
    mindrecord_file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    # field: file_name => filename
-    with pytest.raises(RuntimeError, match="Unexpected error. Invalid data, " \
+    with pytest.raises(RuntimeError, match="Invalid data, " \
                       "the number of schema should be positive but got:"):
        remove_one_file(mindrecord_file_name)
        remove_one_file(mindrecord_file_name + ".db")
@@ -603,7 +603,7 @@ def test_write_with_invalid_data():
        writer.commit()
    # field: data => image
-    with pytest.raises(RuntimeError, match="Unexpected error. Invalid data, " \
+    with pytest.raises(RuntimeError, match="Invalid data, " \
                       "the number of schema should be positive but got:"):
        remove_one_file(mindrecord_file_name)
        remove_one_file(mindrecord_file_name + ".db")
@@ -639,7 +639,7 @@ def test_write_with_invalid_data():
        writer.commit()
    # string type with int value
-    with pytest.raises(RuntimeError, match="Unexpected error. Invalid data, " \
+    with pytest.raises(RuntimeError, match="Invalid data, " \
                       "the number of schema should be positive but got:"):
        remove_one_file(mindrecord_file_name)
        remove_one_file(mindrecord_file_name + ".db")
@@ -675,7 +675,7 @@ def test_write_with_invalid_data():
        writer.commit()
    # field with int64 type, but the real data is string
-    with pytest.raises(RuntimeError, match="Unexpected error. Invalid data, " \
+    with pytest.raises(RuntimeError, match="Invalid data, " \
                       "the number of schema should be positive but got:"):
        remove_one_file(mindrecord_file_name)
        remove_one_file(mindrecord_file_name + ".db")
@@ -711,7 +711,7 @@ def test_write_with_invalid_data():
        writer.commit()
    # bytes field is string
-    with pytest.raises(RuntimeError, match="Unexpected error. Invalid data, " \
+    with pytest.raises(RuntimeError, match="Invalid data, " \
                       "the number of schema should be positive but got:"):
        remove_one_file(mindrecord_file_name)
        remove_one_file(mindrecord_file_name + ".db")
@@ -747,7 +747,7 @@ def test_write_with_invalid_data():
        writer.commit()
    # field is not numpy type
-    with pytest.raises(RuntimeError, match="Unexpected error. Invalid data, " \
+    with pytest.raises(RuntimeError, match="Invalid data, " \
                       "the number of schema should be positive but got:"):
        remove_one_file(mindrecord_file_name)
        remove_one_file(mindrecord_file_name + ".db")
@@ -783,7 +783,7 @@ def test_write_with_invalid_data():
        writer.commit()
    # not enough field
-    with pytest.raises(RuntimeError, match="Unexpected error. Invalid data, " \
+    with pytest.raises(RuntimeError, match="Invalid data, " \
                       "the number of schema should be positive but got:"):
        remove_one_file(mindrecord_file_name)
        remove_one_file(mindrecord_file_name + ".db")