forked from mindspore-Ecosystem/mindspore
!1893 Fix codex problem
Merge pull request !1893 from xiefangqi/xfq_fix_codex3
Commit 03d9130b18
@@ -38,8 +38,8 @@ Status CelebAOp::Builder::Build(std::shared_ptr<CelebAOp> *op) {
   MS_LOG(DEBUG) << "Celeba dataset type is " << builder_dataset_type_.c_str() << ".";
   RETURN_IF_NOT_OK(SanityCheck());
   if (builder_sampler_ == nullptr) {
-    int64_t num_samples = 0;
-    int64_t start_index = 0;
+    const int64_t num_samples = 0;
+    const int64_t start_index = 0;
     builder_sampler_ = std::make_shared<SequentialSampler>(start_index, num_samples);
   }
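Every dataset-op builder touched in this commit gets the same fix: the two locals that feed the default SequentialSampler (num_samples and start_index, where 0 means the whole dataset per the comment in the ImageFolderOp hunk below) are never modified, so they are now const-qualified to satisfy the codex check. A minimal standalone sketch of that pattern, using stub types rather than the real MindSpore classes:

#include <cstdint>
#include <memory>

// Stand-in types: this sketches only the const-default pattern from the diff,
// not the real MindSpore Sampler/SequentialSampler classes.
struct Sampler {
  virtual ~Sampler() = default;
};

struct SequentialSamplerStub : Sampler {
  SequentialSamplerStub(int64_t start_index, int64_t num_samples)
      : start_index_(start_index), num_samples_(num_samples) {}
  int64_t start_index_;
  int64_t num_samples_;  // 0 is read here as "sample the entire dataset"
};

std::shared_ptr<Sampler> MakeDefaultSampler(std::shared_ptr<Sampler> user_sampler) {
  if (user_sampler == nullptr) {
    // Never modified after initialization, hence const (the codex finding).
    const int64_t num_samples = 0;
    const int64_t start_index = 0;
    user_sampler = std::make_shared<SequentialSamplerStub>(start_index, num_samples);
  }
  return user_sampler;
}

int main() {
  auto sampler = MakeDefaultSampler(nullptr);  // no user-supplied sampler -> sequential default
  return sampler != nullptr ? 0 : 1;
}

The same two-line change repeats in the CifarOp, ImageFolderOp, ManifestOp, MnistOp, and VOCOp builders below.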
@@ -46,8 +46,8 @@ CifarOp::Builder::Builder() : sampler_(nullptr) {
 Status CifarOp::Builder::Build(std::shared_ptr<CifarOp> *ptr) {
   RETURN_IF_NOT_OK(SanityCheck());
   if (sampler_ == nullptr) {
-    int64_t num_samples = 0;
-    int64_t start_index = 0;
+    const int64_t num_samples = 0;
+    const int64_t start_index = 0;
     sampler_ = std::make_shared<SequentialSampler>(start_index, num_samples);
   }
   schema_ = std::make_unique<DataSchema>();
@@ -430,7 +430,11 @@ Status CifarOp::CountTotalRows(const std::string &dir, bool isCIFAR10, int64_t *
       std::string err_msg = "Invalid cifar100 file path";
       RETURN_STATUS_UNEXPECTED(err_msg);
     }
-    std::string file_name(file.substr(pos + 1));
+    std::string file_name;
+    if (file.size() > 0)
+      file_name = file.substr(pos + 1);
+    else
+      RETURN_STATUS_UNEXPECTED("Invalid string length!");
    if (file_name.find("test") != std::string::npos) {
      num_cifar100_records = 10000;
    } else if (file_name.find("train") != std::string::npos) {
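This CountTotalRows hunk replaces the unchecked substr one-liner with an explicit emptiness check that returns an error status, rather than relying on substr's edge-case behavior, which is the defensive style the codex check asks for. A rough standalone illustration of the same guard; std::optional stands in for the Status/RETURN_STATUS_UNEXPECTED machinery, and the find_last_of call is assumed from surrounding context that the hunk does not show:

#include <cstddef>
#include <iostream>
#include <optional>
#include <string>

// Extract the file-name component after the last path separator.
// Returns std::nullopt on an empty path instead of slicing blindly.
std::optional<std::string> FileNameFromPath(const std::string &path) {
  if (path.empty()) {
    return std::nullopt;  // mirrors RETURN_STATUS_UNEXPECTED("Invalid string length!")
  }
  const std::size_t pos = path.find_last_of('/');
  // find_last_of returns npos when there is no separator; npos + 1 wraps to 0,
  // so substr(0) simply returns the whole string in that case.
  return path.substr(pos + 1);
}

int main() {
  std::cout << FileNameFromPath("cifar-100-binary/train.bin").value_or("<invalid>") << "\n";
  std::cout << FileNameFromPath("").value_or("<invalid>") << "\n";
  return 0;
}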
@@ -36,8 +36,8 @@ ImageFolderOp::Builder::Builder() : builder_decode_(false), builder_recursive_(f
 Status ImageFolderOp::Builder::Build(std::shared_ptr<ImageFolderOp> *ptr) {
   RETURN_IF_NOT_OK(SanityCheck());
   if (builder_sampler_ == nullptr) {
-    int64_t num_samples = 0;  // default num samples of 0 means to sample entire set of data
-    int64_t start_index = 0;
+    const int64_t num_samples = 0;  // default num samples of 0 means to sample entire set of data
+    const int64_t start_index = 0;
     builder_sampler_ = std::make_shared<SequentialSampler>(start_index, num_samples);
   }
   builder_schema_ = std::make_unique<DataSchema>();
@@ -39,8 +39,8 @@ ManifestOp::Builder::Builder() : builder_sampler_(nullptr), builder_decode_(fals
 Status ManifestOp::Builder::Build(std::shared_ptr<ManifestOp> *ptr) {
   RETURN_IF_NOT_OK(SanityCheck());
   if (builder_sampler_ == nullptr) {
-    int64_t num_samples = 0;
-    int64_t start_index = 0;
+    const int64_t num_samples = 0;
+    const int64_t start_index = 0;
     builder_sampler_ = std::make_shared<SequentialSampler>(start_index, num_samples);
   }
   builder_schema_ = std::make_unique<DataSchema>();
@@ -124,6 +124,7 @@ MindRecordOp::MindRecordOp(int32_t num_mind_record_workers, int32_t rows_per_buf
       operators_(operators),
       num_mind_record_workers_(num_mind_record_workers),
       block_reader_(block_reader),
+      num_rows_(0),
       buffers_needed_(0),
       buf_cnt_(0),
       ended_worker_(0),
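The MindRecordOp hunk adds one more member initialization to the constructor initializer list; leaving built-in members uninitialized is a common static-analysis finding. A hedged sketch of the pattern, with invented member types rather than the real MindRecordOp declaration:

#include <cstdint>

// Illustration only: every scalar member gets an explicit value in the
// constructor initializer list, so none is ever read uninitialized.
class RecordReaderStub {
 public:
  explicit RecordReaderStub(int32_t num_workers)
      : num_workers_(num_workers),
        num_rows_(0),
        buffers_needed_(0),
        buf_cnt_(0),
        ended_worker_(0) {}

 private:
  int32_t num_workers_;
  int64_t num_rows_;
  int64_t buffers_needed_;
  int64_t buf_cnt_;
  int32_t ended_worker_;
};

int main() {
  RecordReaderStub reader(4);
  (void)reader;
  return 0;
}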
@@ -41,8 +41,8 @@ MnistOp::Builder::Builder() : builder_sampler_(nullptr) {
 Status MnistOp::Builder::Build(std::shared_ptr<MnistOp> *ptr) {
   RETURN_IF_NOT_OK(SanityCheck());
   if (builder_sampler_ == nullptr) {
-    int64_t num_samples = 0;
-    int64_t start_index = 0;
+    const int64_t num_samples = 0;
+    const int64_t start_index = 0;
     builder_sampler_ = std::make_shared<SequentialSampler>(start_index, num_samples);
   }
   builder_schema_ = std::make_unique<DataSchema>();
@@ -55,8 +55,8 @@ VOCOp::Builder::Builder() : builder_decode_(false), builder_sampler_(nullptr) {
 Status VOCOp::Builder::Build(std::shared_ptr<VOCOp> *ptr) {
   RETURN_IF_NOT_OK(SanityCheck());
   if (builder_sampler_ == nullptr) {
-    int64_t num_samples = 0;
-    int64_t start_index = 0;
+    const int64_t num_samples = 0;
+    const int64_t start_index = 0;
     builder_sampler_ = std::make_shared<SequentialSampler>(start_index, num_samples);
   }
   builder_schema_ = std::make_unique<DataSchema>();
@@ -59,6 +59,8 @@ Status NgramOp::Compute(const std::shared_ptr<Tensor> &input, std::shared_ptr<Te
       if (end_ind - start_ind < n) {
        res.emplace_back(std::string());  // push back empty string
      } else {
+        if (end_ind - n < 0) RETURN_STATUS_UNEXPECTED("loop condition error!");
+
        for (int i = start_ind; i < end_ind - n; i++) {
          res.emplace_back(str_buffer.substr(offsets[i], offsets[i + n] - offsets[i] - separator_.size()));
        }
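The NgramOp change adds a guard before the n-gram loop so that an inconsistent window (an upper bound of end_ind - n below zero) is reported as an error rather than silently skipped. A simplified, self-contained sketch of the same offsets-based extraction; the buffer layout, the index convention for end_ind, and the example values are assumptions, not the exact MindSpore format:

#include <cstddef>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

// Sketch of the buffer layout NgramOp appears to use: all tokens are concatenated
// into one string with a separator after each token, and offsets[i] marks where
// token i begins (offsets carries one trailing one-past-the-end entry).
std::vector<std::string> NgramsFromBuffer(const std::string &str_buffer,
                                          const std::vector<std::size_t> &offsets,
                                          int start_ind, int end_ind, int n,
                                          const std::string &separator) {
  std::vector<std::string> res;
  if (end_ind - start_ind < n) {
    res.emplace_back(std::string());  // window shorter than n: push back empty string
    return res;
  }
  // The guard added in the diff: an impossible loop bound means the indices are
  // inconsistent, so report an error instead of looping over garbage.
  if (end_ind - n < 0) throw std::runtime_error("loop condition error!");
  for (int i = start_ind; i <= end_ind - n; ++i) {
    // n tokens plus their interior separators, minus the trailing separator.
    res.emplace_back(str_buffer.substr(offsets[i], offsets[i + n] - offsets[i] - separator.size()));
  }
  return res;
}

int main() {
  const std::string sep = " ";
  const std::string buffer = "the quick brown fox ";           // each token followed by sep
  const std::vector<std::size_t> offsets = {0, 4, 10, 16, 20};  // token starts + one-past-end
  for (const auto &g : NgramsFromBuffer(buffer, offsets, 0, 4, 2, sep)) {
    std::cout << "[" << g << "]\n";  // [the quick] [quick brown] [brown fox]
  }
  return 0;
}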