!17100 clean minddata lint

From: @luoyang42
Reviewed-by: @liucunwei,@jonyguo
Signed-off-by: @liucunwei
This commit is contained in:
mindspore-ci-bot 2021-05-31 11:41:44 +08:00 committed by Gitee
commit 61c96a444f
13 changed files with 55 additions and 11 deletions

View File

@ -73,7 +73,7 @@ Status TransferNode::Build(std::vector<std::shared_ptr<DatasetOp>> *const node_o
queue_name_ = Services::GetUniqueID();
}
// FIXME - This is an issue from MindSpore C++ user
// This is an issue from MindSpore C++ user
// https://gitee.com/mindspore/mindspore/issues/I39J9A
// Link _c_expression.so and _c_dataengine.so simultaneously will cause heap overflow because MindData uses MSContext.
// We should find a new way to get device_type here.

View File

@ -28,7 +28,7 @@
#include "minddata/dataset/kernels/image/math_utils.h"
#include "minddata/dataset/kernels/image/resize_cubic_op.h"
#define MAX_INT_PRECISION 16777216 // float int precision is 16777216
const int32_t MAX_INT_PRECISION = 16777216; // float int precision is 16777216
namespace mindspore {
namespace dataset {
int GetCVInterpolationMode(InterpolationMode mode) {

View File

@ -15,7 +15,7 @@
*/
#include "minddata/dataset/util/services.h"
#include <limits.h>
#include <climits>
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID) && !defined(__APPLE__)
#include <sys/syscall.h>
#else

View File

@ -14,7 +14,7 @@
* limitations under the License.
*/
#include "minddata/dataset/util/sig_handler.h"
#include <signal.h>
#include <csignal>
#if !defined(_WIN32) && !defined(_WIN64) && !defined(__ANDROID__) && !defined(ANDROID) && !defined(__APPLE__)
#endif
#include "minddata/dataset/util/task_manager.h"

View File

@ -16,21 +16,25 @@
enums for exceptions
"""
class BaseEnum:
"""
Enum base class.
"""
class LogRuntime(BaseEnum):
"""Log runtime enum."""
RT_HOST = 0b01
RT_DEVICE = 0b10
class ErrorCodeType(BaseEnum):
"""Error code type enum."""
ERROR_CODE = 0b01
EXCEPTION_CODE = 0b10
class ErrorLevel(BaseEnum):
"""Error level."""
COMMON_LEVEL = 0b000

View File

@ -72,6 +72,7 @@ EXCEPTIONS = dict(
)
class MindRecordException(Exception):
"""MindRecord base error class."""
@ -168,6 +169,7 @@ class ParamMissError(MindRecordException):
super(ParamMissError, self).__init__()
self.error_msg = "Param missing. '{}' is required.".format(param_name)
class PathNotExistsError(MindRecordException):
"""
invalid path.
@ -176,6 +178,7 @@ class PathNotExistsError(MindRecordException):
super(PathNotExistsError, self).__init__()
self.error_msg = 'Invalid path. {}'.format(error_path)
class DbConnectionError(MindRecordException):
"""
Database connection error.
@ -183,7 +186,8 @@ class DbConnectionError(MindRecordException):
def __init__(self, error_detail):
super(DbConnectionError, self).__init__()
self.error_msg = 'Db connection is error. Detail: {}'.format(error_detail)
##
class MRMOpenError(MindRecordException):
"""
Raised when could not open mind record file successfully.
@ -192,6 +196,7 @@ class MRMOpenError(MindRecordException):
super(MRMOpenError, self).__init__()
self.error_msg = 'MindRecord File could not open successfully.'
class MRMOpenForAppendError(MindRecordException):
"""
Raised when could not open mind record file successfully for append.
@ -200,85 +205,109 @@ class MRMOpenForAppendError(MindRecordException):
super(MRMOpenForAppendError, self).__init__()
self.error_msg = 'MindRecord File could not open successfully for append.'
class MRMInvalidPageSizeError(MindRecordException):
pass
class MRMInvalidHeaderSizeError(MindRecordException):
pass
class MRMSetHeaderError(MindRecordException):
pass
class MRMWriteDatasetError(MindRecordException):
pass
class MRMCommitError(MindRecordException):
pass
class MRMLaunchError(MindRecordException):
pass
class MRMFinishError(MindRecordException):
pass
class MRMCloseError(MindRecordException):
pass
class MRMAddSchemaError(MindRecordException):
pass
class MRMAddIndexError(MindRecordException):
pass
class MRMBuildSchemaError(MindRecordException):
pass
class MRMGetMetaError(MindRecordException):
pass
class MRMIndexGeneratorError(MindRecordException):
pass
class MRMGenerateIndexError(MindRecordException):
pass
class MRMInitSegmentError(MindRecordException):
pass
class MRMFetchCandidateFieldsError(MindRecordException):
pass
class MRMReadCategoryInfoError(MindRecordException):
pass
class MRMFetchDataError(MindRecordException):
pass
class MRMInvalidSchemaError(MindRecordException):
def __init__(self, error_detail):
super(MRMInvalidSchemaError, self).__init__()
self.error_msg = 'Schema format is error. Detail: {}'.format(error_detail)
class MRMValidateDataError(MindRecordException):
def __init__(self, error_detail):
super(MRMValidateDataError, self).__init__()
self.error_msg = 'Raw data do not match the schema. Detail: {}'.format(error_detail)
class MRMDefineIndexError(MindRecordException):
def __init__(self, error_detail):
super(MRMDefineIndexError, self).__init__()
self.error_msg = 'Failed to define index field. Detail: {}'.format(error_detail)
class MRMDefineBlobError(MindRecordException):
def __init__(self, error_detail):
super(MRMDefineBlobError, self).__init__()
self.error_msg = 'Failed to define blob field. Detail: {}'.format(error_detail)
class MRMUnsupportedSchemaError(MindRecordException):
def __init__(self, error_detail):
super(MRMUnsupportedSchemaError, self).__init__()
self.error_msg = 'Schema is not supported. Detail: {}'.format(error_detail)
class MRMDefineCategoryError(MindRecordException):
def __init__(self, error_detail):
super(MRMDefineCategoryError, self).__init__()

View File

@ -21,6 +21,7 @@ from .common.exceptions import MRMAddSchemaError, MRMAddIndexError, MRMBuildSche
__all__ = ['ShardHeader']
class ShardHeader:
"""
Wrapper class which represents the ShardHeader class in the C++ module.

View File

@ -21,6 +21,7 @@ from .common.exceptions import MRMIndexGeneratorError, MRMGenerateIndexError
__all__ = ['ShardIndexGenerator']
class ShardIndexGenerator:
"""
Wrapper class which represents the ShardIndexGenerator class in the C++ module.

View File

@ -20,6 +20,7 @@ from mindspore import log as logger
from .common.exceptions import MRMOpenError, MRMLaunchError
__all__ = ['ShardReader']
class ShardReader:
"""
Wrapper class which represents the ShardReader class in the C++ module.

View File

@ -23,6 +23,7 @@ from .common.exceptions import MRMOpenError, MRMFetchCandidateFieldsError, MRMRe
__all__ = ['ShardSegment']
class ShardSegment:
"""
Wrapper class which represents the ShardSegment class in the C++ module.

View File

@ -67,6 +67,7 @@ class ExceptionThread(threading.Thread):
self.exception = e
self.exc_traceback = ''.join(traceback.format_exception(*sys.exc_info()))
def check_filename(path, arg_name=""):
"""
Check the filename in the path.
@ -106,6 +107,7 @@ def check_filename(path, arg_name=""):
return True
def check_parameter(func):
"""
Decorator for parameter checking.
@ -136,6 +138,7 @@ def check_parameter(func):
return wrapper
def populate_data(raw, blob, columns, blob_fields, schema):
"""
Reconstruct data form raw and blob data.

View File

@ -34,6 +34,7 @@ safe_builtins = {
'slice',
}
class RestrictedUnpickler(pickle.Unpickler):
"""
Unpickle allowing only few safe classes from the builtins module or numpy
@ -50,7 +51,8 @@ class RestrictedUnpickler(pickle.Unpickler):
if module == "numpy":
return getattr(np, name)
# Forbid everything else.
raise pickle.UnpicklingError("global '%s.%s' is forbidden" %(module, name))
raise pickle.UnpicklingError("global '%s.%s' is forbidden" % (module, name))
def restricted_loads(s):
"""Helper function analogous to pickle.loads()."""
@ -98,11 +100,11 @@ class Cifar10:
files = os.listdir(self.path)
for file in files:
if re.match("data_batch_*", file):
with open(os.path.join(self.path, file), 'rb') as f: #load train data
with open(os.path.join(self.path, file), 'rb') as f: # load train data
dic = restricted_loads(f.read())
images = np.r_[images, dic[b"data"].reshape([-1, 3, 32, 32])]
labels.append(dic[b"labels"])
elif re.match("test_batch", file): #load test data
elif re.match("test_batch", file): # load test data
with open(os.path.join(self.path, file), 'rb') as f:
dic = restricted_loads(f.read())
test_images = np.array(dic[b"data"].reshape([-1, 3, 32, 32]))

View File

@ -33,6 +33,7 @@ safe_builtins = {
'slice',
}
class RestrictedUnpickler(pickle.Unpickler):
"""
Unpickle allowing only few safe classes from the builtins module or numpy
@ -49,7 +50,8 @@ class RestrictedUnpickler(pickle.Unpickler):
if module == "numpy":
return getattr(np, name)
# Forbid everything else.
raise pickle.UnpicklingError("global '%s.%s' is forbidden" %(module, name))
raise pickle.UnpicklingError("global '%s.%s' is forbidden" % (module, name))
def restricted_loads(s):
"""Helper function analogous to pickle.loads()."""
@ -102,12 +104,12 @@ class Cifar100:
files = os.listdir(self.path)
for file in files:
if file == "train":
with open(os.path.join(self.path, file), 'rb') as f: #load train data
with open(os.path.join(self.path, file), 'rb') as f: # load train data
dic = restricted_loads(f.read())
images = np.array(dic[b"data"].reshape([-1, 3, 32, 32]))
fine_labels.append(dic[b"fine_labels"])
coarse_labels.append(dic[b"coarse_labels"])
elif file == "test": #load test data
elif file == "test": # load test data
with open(os.path.join(self.path, file), 'rb') as f:
dic = restricted_loads(f.read())
test_images = np.array(dic[b"data"].reshape([-1, 3, 32, 32]))