!11259 Fix CI issues

From: @ezphlow
Reviewed-by: @nsyca,@robingrosman
Signed-off-by: @robingrosman
This commit is contained in:
mindspore-ci-bot 2021-01-15 05:15:10 +08:00 committed by Gitee
commit ee3cc09b22
15 changed files with 34 additions and 23 deletions

View File

@@ -31,6 +31,9 @@ namespace dataset {
Execute::Execute(std::shared_ptr<TensorOperation> op) : op_(std::move(op)) {}
/// \brief Destructor
Execute::~Execute() = default;
#ifdef ENABLE_ANDROID
std::shared_ptr<tensor::MSTensor> Execute::operator()(std::shared_ptr<tensor::MSTensor> input) {
// Build the op

View File

@@ -53,37 +53,37 @@ PYBIND_REGISTER(TreeGetters, 1, ([](const py::module *m) {
[](PythonTreeGetters &self, std::shared_ptr<DatasetNode> d) { THROW_IF_ERROR(self.Init(d)); })
.def("GetOutputShapes",
[](PythonTreeGetters &self) {
std::vector<TensorShape> shapes;
std::vector<TensorShape> shapes = {};
THROW_IF_ERROR(self.GetOutputShapes(&shapes));
return shapesToListOfShape(shapes);
})
.def("GetOutputTypes",
[](PythonTreeGetters &self) {
std::vector<DataType> types;
std::vector<DataType> types = {};
THROW_IF_ERROR(self.GetOutputTypes(&types));
return typesToListOfType(types);
})
.def("GetNumClasses",
[](PythonTreeGetters &self) {
int64_t num_classes;
int64_t num_classes = -1;
THROW_IF_ERROR(self.GetNumClasses(&num_classes));
return num_classes;
})
.def("GetRepeatCount",
[](PythonTreeGetters &self) {
int64_t repeat_count;
int64_t repeat_count = -1;
THROW_IF_ERROR(self.GetRepeatCount(&repeat_count));
return repeat_count;
})
.def("GetBatchSize",
[](PythonTreeGetters &self) {
int64_t batch_size;
int64_t batch_size = -1;
THROW_IF_ERROR(self.GetBatchSize(&batch_size));
return batch_size;
})
.def("GetColumnNames",
[](PythonTreeGetters &self) {
std::vector<std::string> col_names;
std::vector<std::string> col_names = {};
THROW_IF_ERROR(self.GetColumnNames(&col_names));
return col_names;
})

View File

@@ -202,7 +202,7 @@ std::vector<std::shared_ptr<CsvBase>> toCSVBase(py::list csv_bases) {
return vector;
}
Status ToJson(const py::handle &padded_sample, nlohmann::json *padded_sample_json,
Status ToJson(const py::handle &padded_sample, nlohmann::json *const padded_sample_json,
std::map<std::string, std::string> *sample_bytes) {
for (const py::handle &key : padded_sample) {
if (py::isinstance<py::bytes>(padded_sample[key])) {

View File

@@ -73,7 +73,7 @@ std::vector<std::shared_ptr<CsvBase>> toCSVBase(py::list csv_bases);
std::shared_ptr<TensorOp> toPyFuncOp(py::object func, DataType::Type data_type);
Status ToJson(const py::handle &padded_sample, nlohmann::json *padded_sample_json,
Status ToJson(const py::handle &padded_sample, nlohmann::json *const padded_sample_json,
std::map<std::string, std::string> *sample_bytes);
Status toPadInfo(py::dict value, std::map<std::string, std::pair<TensorShape, std::shared_ptr<Tensor>>> *pad_info);

View File

@@ -61,7 +61,7 @@ Status PythonSaveToDisk::Save() {
PythonSaveToDisk::PythonSaveToDisk(const std::string &datasetPath, int32_t numFiles, const std::string &datasetType)
: SaveToDisk(datasetPath, numFiles, datasetType) {}
Status PythonTreeGetters::GetRow(TensorRow *r) {
Status PythonTreeGetters::GetRow(TensorRow *const r) {
py::gil_scoped_release gil_release;
return TreeGetters::GetRow(r);
}

View File

@@ -53,16 +53,19 @@ class PythonBuildVocabConsumer : public BuildVocabConsumer {
class PythonSaveToDisk : public SaveToDisk {
public:
PythonSaveToDisk(const std::string &datasetPath, int32_t numFiles, const std::string &datasetType);
~PythonSaveToDisk() = default;
Status Save() override;
};
class PythonTreeGetters : public TreeGetters {
public:
Status GetRow(TensorRow *r) override;
Status GetRow(TensorRow *const r) override;
~PythonTreeGetters() = default;
};
class PythonDatasetSizeGetter : public DatasetSizeGetter {
public:
Status GetRow(const std::shared_ptr<TreeAdapter> &tree_adapter, TensorRow *r) override;
~PythonDatasetSizeGetter() = default;
};
} // namespace mindspore::dataset
#endif // MINDSPORE_CCSRC_MINDDATA_DATASET_ENGINE_CONSUMERS_PYTHON_TREE_CONSUMER_H_

View File

@@ -62,7 +62,7 @@ Status IteratorConsumer::GetNextAsVector(std::vector<TensorPtr> *out) {
return Status::OK();
}
Status IteratorConsumer::GetNextAsMap(std::unordered_map<std::string, TensorPtr> *out_map) {
Status IteratorConsumer::GetNextAsMap(std::unordered_map<std::string, TensorPtr> *const out_map) {
RETURN_UNEXPECTED_IF_NULL(out_map);
out_map->clear();
@@ -79,7 +79,7 @@ Status IteratorConsumer::GetNextAsMap(std::unordered_map<std::string, TensorPtr>
return Status::OK();
}
Status IteratorConsumer::GetNextAsOrderedPair(std::vector<std::pair<std::string, std::shared_ptr<Tensor>>> *vec) {
Status IteratorConsumer::GetNextAsOrderedPair(std::vector<std::pair<std::string, std::shared_ptr<Tensor>>> *const vec) {
CHECK_FAIL_RETURN_UNEXPECTED(vec != nullptr && vec->empty(), "vec is null or non-empty.");
TensorRow curr_row;
@@ -142,7 +142,7 @@ Status ToDevice::Stop() {
return Status::OK();
}
Status ToDevice::GetDataInfo(std::vector<DataType> *types, std::vector<TensorShape> *shapes) {
Status ToDevice::GetDataInfo(std::vector<DataType> *const types, std::vector<TensorShape> *const shapes) {
// tree_.root() must be DeviceQueueOp
std::shared_ptr<DatasetOp> root = std::shared_ptr<DatasetOp>(tree_adapter_->GetRoot());
CHECK_FAIL_RETURN_UNEXPECTED(root != nullptr, "Root is a nullptr.");

View File

@@ -72,12 +72,12 @@ class IteratorConsumer : public TreeConsumer {
/// Returns the next row in as a map
/// \param[out] out std::map of string to Tensor
/// \return Status error code
Status GetNextAsMap(std::unordered_map<std::string, TensorPtr> *out);
Status GetNextAsMap(std::unordered_map<std::string, TensorPtr> *const out);
/// Returns the next row in as a vector
/// \param[out] out std::vector of pairs of string to Tensor
/// \return Status error code
Status GetNextAsOrderedPair(std::vector<std::pair<std::string, std::shared_ptr<Tensor>>> *vec);
Status GetNextAsOrderedPair(std::vector<std::pair<std::string, std::shared_ptr<Tensor>>> *const vec);
protected:
/// Method to return the name of the consumer
@@ -161,7 +161,7 @@ class ToDevice : public TreeConsumer {
/// Get data info from TDT
/// \return Status error code
virtual Status GetDataInfo(std::vector<DataType> *types, std::vector<TensorShape> *shapes);
virtual Status GetDataInfo(std::vector<DataType> *const types, std::vector<TensorShape> *const shapes);
protected:
/// Method to return the name of the consumer

View File

@@ -27,7 +27,7 @@ Status PreBuiltDatasetCache::Build() {
return Status::OK();
}
Status PreBuiltDatasetCache::CreateCacheOp(int32_t num_workers, std::shared_ptr<DatasetOp> *ds) {
Status PreBuiltDatasetCache::CreateCacheOp(int32_t num_workers, std::shared_ptr<DatasetOp> *const ds) {
CHECK_FAIL_RETURN_UNEXPECTED(cache_client_ != nullptr, "Cache client has not been created yet.");
std::shared_ptr<CacheOp> cache_op = nullptr;
RETURN_IF_NOT_OK(CacheOp::Builder().SetNumWorkers(num_workers).SetClient(cache_client_).Build(&cache_op));

View File

@@ -32,11 +32,13 @@ class PreBuiltDatasetCache : public DatasetCache {
/// \param cc a pre-built cache client
explicit PreBuiltDatasetCache(std::shared_ptr<CacheClient> cc) : cache_client_(std::move(cc)) {}
~PreBuiltDatasetCache() = default;
/// Method to initialize the DatasetCache by creating an instance of a CacheClient
/// \return Status Error code
Status Build() override;
Status CreateCacheOp(int32_t num_workers, std::shared_ptr<DatasetOp> *ds) override;
Status CreateCacheOp(int32_t num_workers, std::shared_ptr<DatasetOp> *const ds) override;
Status ValidateParams() override { return Status::OK(); }

View File

@@ -388,7 +388,7 @@ Status DatasetNode::AcceptAfter(IRNodePass *const p, bool *const modified) {
return p->VisitAfter(shared_from_this(), modified);
}
Status DatasetNode::GetShardId(int32_t *shard_id) {
Status DatasetNode::GetShardId(int32_t *const shard_id) {
if (!Children().empty()) {
// Get shard id from the child node
return Children()[0]->GetShardId(shard_id);

View File

@@ -169,7 +169,7 @@ class DatasetNode : public std::enable_shared_from_this<DatasetNode> {
/// \brief Pure virtual function for derived class to get the shard id of specific node
/// \return Status Status::OK() if get shard id successfully
virtual Status GetShardId(int32_t *shard_id);
virtual Status GetShardId(int32_t *const shard_id);
/// \brief Gets the dataset size
/// \param[in] size_getter Shared pointer to DatasetSizeGetter

View File

@@ -38,6 +38,9 @@ class Execute {
/// \brief Constructor
explicit Execute(std::shared_ptr<TensorOperation> op);
/// \brief Destructor
~Execute();
#ifdef ENABLE_ANDROID
/// \brief callable function to execute the TensorOperation in eager mode
/// \param[inout] input - the tensor to be transformed

View File

@@ -220,7 +220,7 @@ static Status JpegReadScanlines(jpeg_decompress_struct *const cinfo, int max_sca
}
if (cinfo->out_color_space == JCS_CMYK && num_lines_read > 0) {
for (int i = 0; i < crop_w; ++i) {
int cmyk_pixel = 4 * i + offset;
const int cmyk_pixel = 4 * i + offset;
const int c = scanline_ptr[cmyk_pixel];
const int m = scanline_ptr[cmyk_pixel + 1];
const int y = scanline_ptr[cmyk_pixel + 2];

View File

@@ -99,7 +99,7 @@ static Status JpegReadScanlines(jpeg_decompress_struct *const cinfo, int max_sca
}
if (cinfo->out_color_space == JCS_CMYK && num_lines_read > 0) {
for (int i = 0; i < crop_w; ++i) {
int cmyk_pixel = 4 * i + offset;
const int cmyk_pixel = 4 * i + offset;
const int c = scanline_ptr[cmyk_pixel];
const int m = scanline_ptr[cmyk_pixel + 1];
const int y = scanline_ptr[cmyk_pixel + 2];
@@ -119,7 +119,7 @@ static Status JpegReadScanlines(jpeg_decompress_struct *const cinfo, int max_sca
buffer[3 * i + 2] = b;
}
} else if (num_lines_read > 0) {
int copy_status = memcpy_s(buffer, buffer_size, scanline_ptr + offset, stride);
auto copy_status = memcpy_s(buffer, buffer_size, scanline_ptr + offset, stride);
if (copy_status != 0) {
jpeg_destroy_decompress(cinfo);
RETURN_STATUS_UNEXPECTED("memcpy failed");