diff --git a/mindspore/ccsrc/minddata/dataset/api/datasets.cc b/mindspore/ccsrc/minddata/dataset/api/datasets.cc index 9a68436b5ee..afac5545e5d 100644 --- a/mindspore/ccsrc/minddata/dataset/api/datasets.cc +++ b/mindspore/ccsrc/minddata/dataset/api/datasets.cc @@ -155,8 +155,6 @@ std::shared_ptr Dataset::CreateIteratorCharIF(std::vector> GetDefaultBoxes(BoxesConfig config) { + size_t size = config.num_default.size(); + if (size <= 1 || config.feature_size.size() != size || config.steps.size() != size || + config.aspect_rations.size() != size) { + return {}; + } + if (config.max_scale < config.min_scale) { + return {}; + } std::vector fk; float num = static_cast(config.img_shape[0]); for (int i = 0; i < config.steps.size(); i++) { + if (config.steps[i] == 0) { + return {}; + } fk.push_back(num / config.steps[i]); } - if (config.num_default.size() < 2) { - return {}; - } float scale_rate = (config.max_scale - config.min_scale) / (config.num_default.size() - 1); std::vector scales(config.num_default.size()); for (int i = 0; i < scales.size(); i++) { @@ -1043,7 +1051,15 @@ std::vector> GetDefaultBoxes(BoxesConfig config) { void ConvertBoxes(std::vector> &boxes, const std::vector> &default_boxes, const BoxesConfig config) { + if (boxes.size() != default_boxes.size() || config.prior_scaling.size() != 2) { + boxes = {}; + return; + } for (int i = 0; i < default_boxes.size(); i++) { + if (boxes[i].size() != 4 || default_boxes[i].size() != 4) { + boxes = {}; + return; + } boxes[i][0] = boxes[i][0] * config.prior_scaling[0] * default_boxes[i][2] + default_boxes[i][0]; boxes[i][1] = boxes[i][1] * config.prior_scaling[0] * default_boxes[i][3] + default_boxes[i][1]; boxes[i][2] = exp(boxes[i][2] * config.prior_scaling[1]) * default_boxes[i][2]; @@ -1057,6 +1073,9 @@ std::vector ApplyNms(const std::vector> &all_boxes, std: std::vector areas(boxes_num); std::vector order(boxes_num); for (int i = 0; i < boxes_num; i++) { + if (all_boxes[i].size() < 4) { + return {}; + } areas[i] = 
(all_boxes[i][3] - all_boxes[i][1] + 1) * (all_boxes[i][2] - all_boxes[i][0] + 1); order[i] = i; } diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h b/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h index a3e3a648285..da16b5051df 100644 --- a/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h +++ b/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/image_process.h @@ -215,7 +215,7 @@ bool GetAffineTransform(std::vector src_point, std::vector dst_poi /// \brief Matrix transpose. /// \param[in] src Input matrix. /// \param[in] dst Output matrix. -bool Transpose(LiteMat &src, LiteMat &dst); +bool Transpose(const LiteMat &src, LiteMat &dst); /// \brief Filter the image by a Gaussian kernel /// \param[in] src LiteMat image to be processed. Only LiteMat of type UINT8 is supported now. diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.cc b/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.cc index 85ee2fca91c..d555a248c29 100644 --- a/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.cc +++ b/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.cc @@ -177,6 +177,10 @@ void LiteMat::Init(int width, LDataType data_type) { dims_ = 1; height_ = 1; channel_ = 1; + if (!CheckLiteMat()) { + Release(); + return; + } c_step_ = width; size_ = c_step_ * elem_size_; data_ptr_ = AlignMalloc(size_); @@ -193,6 +197,10 @@ void LiteMat::Init(int width, int height, LDataType data_type) { height_ = height; dims_ = 2; channel_ = 1; + if (!CheckLiteMat()) { + Release(); + return; + } c_step_ = width_ * height_; size_ = c_step_ * elem_size_; data_ptr_ = AlignMalloc(size_); @@ -209,6 +217,10 @@ void LiteMat::Init(int width, int height, void *p_data, LDataType data_type) { height_ = height; dims_ = 2; channel_ = 1; + if (!CheckLiteMat()) { + Release(); + return; + } c_step_ = height_ * width_; size_ = c_step_ * channel_ * elem_size_; data_ptr_ = 
p_data; @@ -225,6 +237,10 @@ void LiteMat::Init(int width, int height, int channel, LDataType data_type, bool height_ = height; dims_ = 3; channel_ = channel; + if (!CheckLiteMat()) { + Release(); + return; + } if (align_memory) { c_step_ = ((height_ * width_ * elem_size_ + ALIGN - 1) & (-ALIGN)) / elem_size_; } else { @@ -247,6 +263,10 @@ void LiteMat::Init(int width, int height, int channel, void *p_data, LDataType d height_ = height; dims_ = 3; channel_ = channel; + if (!CheckLiteMat()) { + Release(); + return; + } c_step_ = height_ * width_; size_ = c_step_ * channel_ * elem_size_; data_ptr_ = p_data; @@ -299,6 +319,24 @@ void LiteMat::AlignFree(void *ptr) { inline void LiteMat::InitElemSize(LDataType data_type) { elem_size_ = data_type.SizeInBytes(); } +bool LiteMat::CheckLiteMat() { + if (width_ <= 0 || height_ <= 0 || channel_ <= 0 || elem_size_ <= 0) { + return false; + } + if (height_ != 1 && height_ > std::numeric_limits::max() / width_) { + return false; + } + int area = height_ * width_; + if (channel_ != 1 && channel_ > std::numeric_limits::max() / area) { + return false; + } + int size = area * channel_; + if (elem_size_ > std::numeric_limits::max() / size) { + return false; + } + return true; +} + bool LiteMat::GetROI(int x, int y, int w, int h, LiteMat &m) { if (x < 0 || y < 0 || x > width_ - w || h > height_ - y || w <= 0 || h <= 0) { return false; diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h b/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h index 38273e0be76..6acead3ed05 100644 --- a/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h +++ b/mindspore/ccsrc/minddata/dataset/kernels/image/lite_cv/lite_mat.h @@ -290,6 +290,8 @@ class LiteMat { /// \param[in] c2 The number used to set teh value of step[2]. 
void setSteps(int c0, int c1, int c2); + bool CheckLiteMat(); + public: void *data_ptr_ = nullptr; int elem_size_; diff --git a/mindspore/ccsrc/minddata/dataset/util/arena.h b/mindspore/ccsrc/minddata/dataset/util/arena.h index 09d6467e78f..117dcd1a9d5 100644 --- a/mindspore/ccsrc/minddata/dataset/util/arena.h +++ b/mindspore/ccsrc/minddata/dataset/util/arena.h @@ -110,7 +110,7 @@ class Arena : public MemoryPool { ~Arena() override { #ifdef ENABLE_GPUQUE if (is_cuda_malloc_) { - if (ptr_) { + if (ptr_ != nullptr) { (void)cudaFreeHost(ptr_); } } diff --git a/mindspore/ccsrc/minddata/dataset/util/auto_index.h b/mindspore/ccsrc/minddata/dataset/util/auto_index.h index 886462248bf..a68d7a586c8 100644 --- a/mindspore/ccsrc/minddata/dataset/util/auto_index.h +++ b/mindspore/ccsrc/minddata/dataset/util/auto_index.h @@ -57,7 +57,7 @@ class AutoIndexObj : public BPlusTree, T> { Status insert(std::unique_ptr &&val, key_type *key = nullptr) { key_type my_inx = inx_.fetch_add(1); - if (key) { + if (key != nullptr) { *key = my_inx; } return my_tree::DoInsert(my_inx, std::move(val)); diff --git a/mindspore/ccsrc/minddata/dataset/util/treap.h b/mindspore/ccsrc/minddata/dataset/util/treap.h index 50c59b4f761..8e8003f2ce9 100644 --- a/mindspore/ccsrc/minddata/dataset/util/treap.h +++ b/mindspore/ccsrc/minddata/dataset/util/treap.h @@ -77,7 +77,7 @@ class Treap { // @return // Return the root of the heap. It has the highest priority. But not necessarily the first key. std::pair Top() const { - if (root_) { + if (root_ != nullptr) { return std::make_pair(root_->nv, true); } else { return std::make_pair(NodeValue{key_type(), priority_type()}, false); @@ -86,7 +86,7 @@ class Treap { // Remove the root of the heap. 
void Pop() { - if (root_) { + if (root_ != nullptr) { DeleteKey(root_->nv.key); } } @@ -116,9 +116,9 @@ class Treap { class iterator : public std::iterator { public: explicit iterator(Treap *tr) : tr_(tr), cur_(nullptr) { - if (tr_) { + if (tr_ != nullptr) { cur_ = tr_->root_; - while (cur_) { + while (cur_ != nullptr) { stack_.push(cur_); cur_ = cur_->left; } @@ -196,9 +196,9 @@ class Treap { class const_iterator : public std::iterator { public: explicit const_iterator(const Treap *tr) : tr_(tr), cur_(nullptr) { - if (tr_) { + if (tr_ != nullptr) { cur_ = tr_->root_; - while (cur_) { + while (cur_ != nullptr) { stack_.push(cur_); cur_ = cur_->left; } @@ -226,7 +226,7 @@ class Treap { const_iterator &operator++() { if (cur_) { stack_.pop(); - if (cur_->right) { + if (cur_->right != nullptr) { TreapNode *n = cur_->right; while (n) { stack_.push(n); @@ -247,7 +247,7 @@ class Treap { iterator tmp(*this); if (cur_) { stack_.pop(); - if (cur_->right) { + if ((cur_->right) != nullptr) { TreapNode *n = cur_->right; while (n) { stack_.push(n); diff --git a/mindspore/ccsrc/minddata/mindrecord/io/shard_index_generator.cc b/mindspore/ccsrc/minddata/mindrecord/io/shard_index_generator.cc index 35b23dd9037..21e223be24e 100644 --- a/mindspore/ccsrc/minddata/mindrecord/io/shard_index_generator.cc +++ b/mindspore/ccsrc/minddata/mindrecord/io/shard_index_generator.cc @@ -357,7 +357,7 @@ MSRStatus ShardIndexGenerator::BindParameterExecuteSQL( const std::vector>> &data) { sqlite3_stmt *stmt = nullptr; if (sqlite3_prepare_v2(db, common::SafeCStr(sql), -1, &stmt, 0) != SQLITE_OK) { - if (stmt) { + if (stmt != nullptr) { (void)sqlite3_finalize(stmt); } MS_LOG(ERROR) << "SQL error: could not prepare statement, sql: " << sql; diff --git a/mindspore/ccsrc/minddata/mindrecord/io/shard_writer.cc b/mindspore/ccsrc/minddata/mindrecord/io/shard_writer.cc index db6315d9dc1..c23e2656084 100644 --- a/mindspore/ccsrc/minddata/mindrecord/io/shard_writer.cc +++ 
b/mindspore/ccsrc/minddata/mindrecord/io/shard_writer.cc @@ -558,6 +558,7 @@ int ShardWriter::LockWriter(bool parallel_writer) { flock(fd, LOCK_EX); } else { MS_LOG(ERROR) << "Shard writer failed when locking file"; + close(fd); return -1; } #endif diff --git a/mindspore/ccsrc/minddata/mindrecord/meta/shard_column.cc b/mindspore/ccsrc/minddata/mindrecord/meta/shard_column.cc index a3df213234e..84b8e45d698 100644 --- a/mindspore/ccsrc/minddata/mindrecord/meta/shard_column.cc +++ b/mindspore/ccsrc/minddata/mindrecord/meta/shard_column.cc @@ -424,6 +424,7 @@ MSRStatus ShardColumn::UncompressInt(const uint64_t &column_id, std::unique_ptr< int ret_code = memcpy_s(data_ptr->get(), *num_bytes, data, *num_bytes); if (ret_code != 0) { MS_LOG(ERROR) << "Failed to copy data!"; + return FAILED; } return SUCCESS; diff --git a/mindspore/dataset/datapreprocess/preprocess_imagenet_validate_dataset.py b/mindspore/dataset/datapreprocess/preprocess_imagenet_validate_dataset.py index 5d15318c85c..9321b021eb5 100644 --- a/mindspore/dataset/datapreprocess/preprocess_imagenet_validate_dataset.py +++ b/mindspore/dataset/datapreprocess/preprocess_imagenet_validate_dataset.py @@ -34,7 +34,7 @@ def preprocess_imagenet_validation_dataset(train_dataset_path, validation_datase validate_sub_dir = os.path.join(validation_dataset_path, sub_dir_name) validate_sub_dir = os.path.realpath(validate_sub_dir) if not os.path.exists(validate_sub_dir): - os.makedirs(validate_sub_dir) + os.makedirs(validate_sub_dir, mode=0o750) real_file_path = os.path.realpath(image_label_mapping_file) mappings = [mapping.strip() for mapping in open(real_file_path).readlines()] diff --git a/mindspore/dataset/engine/validators.py b/mindspore/dataset/engine/validators.py index bf688a8e577..ad892842889 100644 --- a/mindspore/dataset/engine/validators.py +++ b/mindspore/dataset/engine/validators.py @@ -284,7 +284,7 @@ def check_save(method): nreq_param_str = ['file_name', 'file_type'] 
validate_dataset_param_value(nreq_param_int, param_dict, int) if (param_dict.get('num_files') <= 0 or param_dict.get('num_files') > 1000): - raise ValueError("num_files should between {} and {}.".format(1, 1000)) + raise ValueError("num_files should be between 1 and 1000.") validate_dataset_param_value(nreq_param_str, param_dict, str) if param_dict.get('file_type') != 'mindrecord': raise ValueError("{} dataset format is not supported.".format(param_dict.get('file_type'))) diff --git a/mindspore/dataset/text/validators.py b/mindspore/dataset/text/validators.py index e027ade0b19..883ac42e890 100644 --- a/mindspore/dataset/text/validators.py +++ b/mindspore/dataset/text/validators.py @@ -318,7 +318,7 @@ def check_from_dataset(method): type_check(freq_range, (tuple,), "freq_range") if len(freq_range) != 2: - raise ValueError("freq_range needs to be a tuple of 2 integers or an int and a None.") + raise ValueError("freq_range needs to be a tuple of 2 elements.") for num in freq_range: if num is not None and (not isinstance(num, int)): diff --git a/tests/ut/python/dataset/test_from_dataset.py b/tests/ut/python/dataset/test_from_dataset.py index 801f1aae32f..878ef48091c 100644 --- a/tests/ut/python/dataset/test_from_dataset.py +++ b/tests/ut/python/dataset/test_from_dataset.py @@ -131,7 +131,7 @@ def test_from_dataset_exceptions(): except (TypeError, ValueError) as e: assert s in str(e), str(e) - test_config("text", (), 1, "freq_range needs to be a tuple of 2 integers or an int and a None.") + test_config("text", (), 1, "freq_range needs to be a tuple of 2 elements.") test_config("text", (2, 3), 1.2345, "Argument top_k with value 1.2345 is not of type [, ]") test_config(23, (2, 3), 1.2345, "Argument col[0] with value 23 is not of type []") diff --git a/tests/ut/python/dataset/test_save_op.py b/tests/ut/python/dataset/test_save_op.py index 41f34b7b7e2..037460952b6 100644 --- a/tests/ut/python/dataset/test_save_op.py +++ b/tests/ut/python/dataset/test_save_op.py @@ -381,7 +381,7
@@ def test_case_05(add_remove_file): d1 = ds.GeneratorDataset(generator_1d, ["data"], shuffle=False) - with pytest.raises(Exception, match="num_files should between 1 and 1000."): + with pytest.raises(Exception, match="num_files should be between 1 and 1000."): d1.save(AUTO_FILE, 0)