MindData Codespell correction

luoyang 2021-04-06 19:40:10 +08:00
parent ce248c37e0
commit 624511337c
29 changed files with 86 additions and 86 deletions

View File

@@ -28,7 +28,7 @@ namespace dataset {
class TensorOpFusionPass : public IRNodePass {
/// \brief Identifies and fuses tensor ops within MapOp
/// \param[in] node The node being visited
/// \param[inout] *modified indicates whether the node has been visited
/// \param[in, out] *modified indicates whether the node has been visited
/// \return Status The status code returned
Status Visit(std::shared_ptr<MapNode> node, bool *const modified) override;
};
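For reference, the corrected direction annotation reads like this in practice. The declaration below is illustrative only, not taken from the MindData sources:

/// \brief Scales a value in place.
/// \param[in] factor The multiplier to apply.
/// \param[in, out] value On input, the original value; on output, the scaled result.
/// \return Status The status code returned
Status Scale(int32_t factor, int32_t *value);  // hypothetical example, not in the codebase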

View File

@@ -38,31 +38,31 @@ class GeneratorNodePass : public IRNodePass {
/// \brief Record the starting point to collect the Generator node
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status Visit(std::shared_ptr<RepeatNode> node, bool *const modified) override;
/// \brief Record the starting point to collect the Generator node
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status Visit(std::shared_ptr<EpochCtrlNode> node, bool *const modified) override;
/// \brief Add the Generator node to the set
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status Visit(std::shared_ptr<GeneratorNode> node, bool *const modified) override;
/// \brief Add the Generator node(s) from the set to this Repeat node for run-time processing
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status VisitAfter(std::shared_ptr<RepeatNode> node, bool *const modified) override;
/// \brief Add the Generator node(s) from the set to this EpochCtrl node for run-time processing
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status VisitAfter(std::shared_ptr<EpochCtrlNode> node, bool *const modified) override;
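All of these passes follow the same contract: Visit fires on the way down the IR tree, VisitAfter fires on the way back up, and *modified reports whether the pass rewrote anything. A minimal sketch of that contract, using simplified stand-in types rather than the real MindData classes:

#include <memory>

struct DatasetNode {};                      // stand-in for the real IR node type
struct Status { static Status OK() { return Status(); } };

class NodePassSketch {                      // hypothetical, mirrors the shape of IRNodePass
 public:
  virtual ~NodePassSketch() = default;
  // Invoked before a node's children are walked.
  virtual Status Visit(std::shared_ptr<DatasetNode> node, bool *const modified) {
    *modified = false;  // set to true only when the pass rewrites the node
    return Status::OK();
  }
  // Invoked after the node's children have been walked.
  virtual Status VisitAfter(std::shared_ptr<DatasetNode> node, bool *const modified) {
    *modified = false;
    return Status::OK();
  }
};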

View File

@@ -37,13 +37,13 @@ class DeepCopyPass : public IRNodePass {
/// \brief Clone a new copy of the node
/// \param[in] node The node being visited
/// \param[inout] *modified indicates whether the node has been visited
/// \param[in, out] *modified indicates whether the node has been visited
/// \return Status code
Status Visit(std::shared_ptr<DatasetNode> node, bool *const modified) override;
/// \brief Reset parent after walking its sub tree.
/// \param[in] node The node being visited
/// \param[inout] *modified indicates whether the node has been visited
/// \param[in, out] *modified indicates whether the node has been visited
/// \return Status code
Status VisitAfter(std::shared_ptr<DatasetNode> node, bool *const modified) override;

View File

@@ -44,27 +44,27 @@ class EpochCtrlPass : public IRTreePass {
/// \brief Performs finder work for BuildVocabNode that has special rules about epoch control injection.
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status Visit(std::shared_ptr<RootNode> node, bool *const modified) override;
/// \brief Performs finder work for BuildVocabNode that has special rules about epoch control injection.
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status Visit(std::shared_ptr<BuildVocabNode> node, bool *const modified) override;
#ifndef ENABLE_ANDROID
/// \brief Performs finder work for BuildSentenceVocabNode that has special rules about epoch control injection.
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status Visit(std::shared_ptr<BuildSentenceVocabNode> node, bool *const modified) override;
#endif
/// \brief Register the TransferNode for further action.
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status VisitAfter(std::shared_ptr<TransferNode> node, bool *const modified) override;
@@ -87,8 +87,8 @@ class EpochCtrlPass : public IRTreePass {
~EpochCtrlPass() = default;
/// \brief Runs an injection pass to inject in operators needed at the pre pass stage
/// \param[inout] tree The tree to operate on.
/// \param[inout] Indicate of the tree was modified.
/// \param[in, out] tree The tree to operate on.
/// \param[in, out] Indicate of the tree was modified.
/// \return Status The status code returned
Status RunOnTree(std::shared_ptr<DatasetNode> root_ir, bool *const modified) override;
};

View File

@@ -29,7 +29,7 @@ namespace dataset {
class InputValidationPass : public IRNodePass {
/// \brief Runs a validation pass to check input parameters
/// \param[in] node The node being visited
/// \param[inout] *modified indicates whether the node has been visited
/// \param[in, out] *modified indicates whether the node has been visited
/// \return Status code
Status Visit(std::shared_ptr<DatasetNode> node, bool *const modified) override;
};

View File

@@ -44,19 +44,19 @@ class NodeRemovalPass : public IRTreePass {
/// \brief Perform RepeatNode removal check
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status Visit(std::shared_ptr<RepeatNode> node, bool *const modified) override;
/// \brief Perform SkipNode removal check
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status Visit(std::shared_ptr<SkipNode> node, bool *const modified) override;
/// \brief Perform TakeNode removal check
/// \param[in] node The node being visited
/// \param[inout] modified Indicator if the node was changed at all
/// \param[in, out] modified Indicator if the node was changed at all
/// \return Status The status code returned
Status Visit(std::shared_ptr<TakeNode> node, bool *const modified) override;
@@ -76,8 +76,8 @@ class NodeRemovalPass : public IRTreePass {
~NodeRemovalPass() = default;
/// \brief Runs a removal_nodes pass first to find out which nodes to remove, then removes them.
/// \param[inout] tree The tree to operate on.
/// \param[inout] Indicate of the tree was modified.
/// \param[in, out] tree The tree to operate on.
/// \param[in, out] Indicate of the tree was modified.
/// \return Status The status code returned
Status RunOnTree(std::shared_ptr<DatasetNode> root_ir, bool *const modified) override;
};

View File

@@ -98,14 +98,14 @@ class BoundingBox {
/// \brief Updates bounding boxes with required Top and Left padding
/// \note Top and Left padding amounts required to adjust bboxs min X,Y values according to padding 'push'
/// Top/Left since images 0,0 coordinate is taken from top left
/// \param bboxList: A tensor contaning bounding box tensors
/// \param bboxList: A tensor containing bounding box tensors
/// \param bboxCount: total Number of bounding boxes - required within caller function to run update loop
/// \param pad_top: Total amount of padding applied to image top
/// \param pad_left: Total amount of padding applied to image left side
static Status PadBBoxes(const TensorPtr *bbox_list, size_t bbox_count, int32_t pad_top, int32_t pad_left);
/// \brief Updates and checks bounding boxes for new cropped region of image
/// \param bbox_list: A tensor contaning bounding box tensors
/// \param bbox_list: A tensor containing bounding box tensors
/// \param bbox_count: total Number of bounding boxes - required within caller function to run update loop
/// \param CB_Xmin: Image's CropBox Xmin coordinate
/// \param CB_Xmin: Image's CropBox Ymin coordinate
@@ -116,7 +116,7 @@ class BoundingBox {
/// \brief Updates bounding boxes for an Image Resize Operation - Takes in set of valid BBoxes
/// For e.g those that remain after a crop
/// \param bbox_list: A tensor contaning bounding box tensors
/// \param bbox_list: A tensor containing bounding box tensors
/// \param bbox_count: total Number of bounding boxes - required within caller function to run update loop
/// \param target_width: required width of image post resize
/// \param target_height: required height of image post resize
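The updates these helpers describe boil down to simple coordinate arithmetic: padding shifts every box by the top/left pad amounts, and resizing scales coordinates by the width/height ratios. A hedged sketch of that math with a hypothetical BBox type (the real implementation operates on tensors):

struct BBox { float x, y, w, h; };  // hypothetical; the real code stores boxes in a Tensor

// Padding on top/left pushes the image content down and right,
// so every box's min X/Y moves by the same amounts.
BBox PadBox(BBox b, float pad_top, float pad_left) {
  b.x += pad_left;
  b.y += pad_top;
  return b;
}

// Resizing scales X coordinates by new_w/old_w and Y by new_h/old_h.
BBox ResizeBox(BBox b, float old_w, float old_h, float new_w, float new_h) {
  const float sx = new_w / old_w;
  const float sy = new_h / old_h;
  return {b.x * sx, b.y * sy, b.w * sx, b.h * sy};
}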

View File

@@ -35,7 +35,7 @@ class BoundingBoxAugmentOp : public TensorOp {
static const float kDefRatio;
// Constructor for BoundingBoxAugmentOp
// @param std::shared_ptr<TensorOp> transform transform: C++ opration to apply on select bounding boxes
// @param std::shared_ptr<TensorOp> transform transform: C++ operation to apply on select bounding boxes
// @param float ratio: ratio of bounding boxes to have the transform applied on
BoundingBoxAugmentOp(std::shared_ptr<TensorOp> transform, float ratio);

View File

@@ -33,10 +33,10 @@ struct SoftDpProcsessInfo {
};
struct SoftDpCropInfo {
uint32_t left; // crop left boundry
uint32_t right; // crop right boundry
uint32_t up; // crop up boundry
uint32_t down; // crop down boundry
uint32_t left; // crop left boundary
uint32_t right; // crop right boundary
uint32_t up; // crop up boundary
uint32_t down; // crop down boundary
};
/*

View File

@@ -190,11 +190,11 @@ uint32_t SoftJpegd::ConfigVpcInputData(struct VpcInfo *vpc_input_info, int32_t *
}
/*
* @brief : destory libjpeg source
* @brief : destroy libjpeg source
* @param [in] struct jpeg_decompress_struct &libjpeg_handler : libjpeg handle.
* @param [in] tjhandle &handle : tjhandle.
*/
void DestoryLibjpegSource(struct jpeg_decompress_struct *libjpeg_handler, const tjhandle &handle) {
void DestroyLibjpegSource(struct jpeg_decompress_struct *libjpeg_handler, const tjhandle &handle) {
(void)tjDestroy(handle);
jpeg_destroy_decompress(libjpeg_handler);
}
@@ -211,7 +211,7 @@ uint32_t SoftJpegd::JpegdSoftwareDecodeProcess(struct VpcInfo *vpc_input_info,
int32_t prepare_decode_res = PrepareDecode(&libjpeg_handler, vpc_input_info, soft_dp_process_info);
if (prepare_decode_res != decodeSucc) {
JPEGD_LOGE("prepare decode failed!");
DestoryLibjpegSource(&libjpeg_handler, handle);
DestroyLibjpegSource(&libjpeg_handler, handle);
return decodeErr;
}
@@ -220,14 +220,14 @@ uint32_t SoftJpegd::JpegdSoftwareDecodeProcess(struct VpcInfo *vpc_input_info,
&height, &sub_sample, &color_spase);
if (decode_header_res != decodeSucc) {
JPEGD_LOGE("Decompress header failed, width = %d, height = %d.", width, height);
DestoryLibjpegSource(&libjpeg_handler, handle);
DestroyLibjpegSource(&libjpeg_handler, handle);
return decodeErr;
}
int32_t alloc_out_buf_res = AllocOutputBuffer(vpc_input_info, &width, &height, &sub_sample);
if (alloc_out_buf_res != decodeSucc) {
JPEGD_LOGE("alloc output buffer failed!");
DestoryLibjpegSource(&libjpeg_handler, handle);
DestroyLibjpegSource(&libjpeg_handler, handle);
return decodeErr;
}
@@ -239,15 +239,15 @@ uint32_t SoftJpegd::JpegdSoftwareDecodeProcess(struct VpcInfo *vpc_input_info,
std::this_thread::get_id());
delete[] soft_decode_out_buf_;
soft_decode_out_buf_ = nullptr;
DestoryLibjpegSource(&libjpeg_handler, handle);
DestroyLibjpegSource(&libjpeg_handler, handle);
return decodeErr;
}
int32_t config_vpc_res = ConfigVpcInputData(vpc_input_info, &width, &height);
if (config_vpc_res != decodeSucc) {
DestoryLibjpegSource(&libjpeg_handler, handle);
DestroyLibjpegSource(&libjpeg_handler, handle);
return decodeErr;
}
DestoryLibjpegSource(&libjpeg_handler, handle);
DestroyLibjpegSource(&libjpeg_handler, handle);
return decodeSucc;
}
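Every exit path in the function above must remember to call DestroyLibjpegSource by hand. A scope guard is one common C++ alternative; this is only a sketch of that pattern, not a change the commit makes:

// Hypothetical RAII guard: releases both handles when it leaves scope,
// so early returns cannot skip the cleanup.
struct LibjpegGuard {
  jpeg_decompress_struct *libjpeg_handler;
  tjhandle handle;
  ~LibjpegGuard() { DestroyLibjpegSource(libjpeg_handler, handle); }
};
// Usage: LibjpegGuard guard{&libjpeg_handler, handle};
// ...after which each `return decodeErr;` needs no explicit destroy call.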

View File

@@ -35,7 +35,7 @@ class SoftJpegd {
* @brief : decode interface
* @param [in] VpcInfo& vpc_input_info : vpc input information
* @param [in] SoftDpProcsessInfo& soft_dp_process_info : softDp process info
* @return : decodeSucc:decode success decodeErr:decode failed.
* @return : decodeSucc:decode success, decodeErr:decode failed.
*/
uint32_t JpegdSoftwareDecodeProcess(struct VpcInfo *vpc_input_info, struct SoftDpProcsessInfo *soft_dp_process_info);
@@ -50,7 +50,7 @@ class SoftJpegd {
* @param [in] int32_t& sub_sample : level of chrominance subsampling in the image
* @param [in] int32_t& color_spase : pointer to an integer variable that will receive one of the JPEG
* constants, indicating the colorspace of the JPEG image.
* @return : decodeSucc:alloc output buf success decodeErr:alloc output buf failed.
* @return : decodeSucc:alloc output buf success, decodeErr:alloc output buf failed.
*/
uint32_t AllocOutputBuffer(struct VpcInfo *vpc_input_info, int32_t *width, int32_t *height, int32_t *sub_sample);
@@ -59,7 +59,7 @@ class SoftJpegd {
* @param [in] VpcInfo& vpc_input_info : vpc input information
* @param [in] int32_t& width : output width
* @param [in] int32_t& height : output height
* @return : decodeSucc:config output buf succes decodeErr:config output buf failed.
* @return : decodeSucc:config output buf success, decodeErr:config output buf failed.
*/
uint32_t ConfigVpcInputData(struct VpcInfo *vpc_input_info, int32_t *width, int32_t *height);
};

View File

@@ -375,11 +375,11 @@ void SoftVpc::ChipPreProcess() {
YuvWPara *yuv_scaler_paraset = YuvScalerParaSet::GetInstance();
YuvScalerPara *scale = yuv_scaler_paraset->scale;
int32_t index = GetScalerParamterIndex(horizon_coeff_, yuv_scaler_paraset);
int32_t index = GetScalerParameterIndex(horizon_coeff_, yuv_scaler_paraset);
y_horizon_tap_ = scale[index].taps_6;
uv_horizon_tap_ = scale[index].taps_4;
index = GetScalerParamterIndex(vertical_coeff_, yuv_scaler_paraset);
index = GetScalerParameterIndex(vertical_coeff_, yuv_scaler_paraset);
vertical_tap_ = (half_line_mode_) ? scale[index].taps_6 : scale[index].taps_4;
}

View File

@@ -249,25 +249,25 @@ YuvWPara *YuvScalerParaSet::GetInstance(std::string *paraset_name, uint32_t yuv_
}
// Searching for the index number of the filtering parameter by using the dichotomy
int32_t GetScalerParamterIndex(uint32_t paramter, YuvWPara *paramterset) {
int32_t count = paramterset->real_count;
int32_t GetScalerParameterIndex(uint32_t parameter, YuvWPara *parameterset) {
int32_t count = parameterset->real_count;
int32_t left = 0;
int32_t right = count - 1;
YuvScalerPara *scaler = paramterset->scale;
YuvScalerPara *scaler = parameterset->scale;
int32_t index = 0;
if (paramter <= scalerRadio1Time) {
if (parameter <= scalerRadio1Time) {
index = 0;
} else {
paramter = paramter >> paramterInterval;
parameter = parameter >> parameterInterval;
while (left <= right) {
index = (left + right) / 2; // 2-point search
if (paramter > scaler[index].range.start && paramter <= scaler[index].range.end) {
if (parameter > scaler[index].range.start && parameter <= scaler[index].range.end) {
break;
}
if (paramter > scaler[index].range.end) {
if (parameter > scaler[index].range.end) {
left = index + 1;
} else if (paramter <= scaler[index].range.start) {
} else if (parameter <= scaler[index].range.start) {
right = index - 1;
}
}

View File

@@ -25,7 +25,7 @@ const uint32_t maxFileCount = 10;
const uint32_t kMaxParaCount = 2048;
const uint32_t kScalerCoffNb4 = 32;
const uint32_t kScalerCoffNb6 = 48;
const uint32_t paramterInterval = 6; // scaler Number of bits between each group of filtering coefficients
const uint32_t parameterInterval = 6; // scaler Number of bits between each group of filtering coefficients
const uint32_t scalerRadio1Time = 0X10000;
struct ScalerCoefficientIndex {
@@ -83,7 +83,7 @@ class YuvScalerParaSet {
static pthread_mutex_t g_mutex_;
};
int32_t GetScalerParamterIndex(uint32_t paramter, YuvWPara *paramterset);
int32_t GetScalerParameterIndex(uint32_t parameter, YuvWPara *parameterset);
constexpr YuvWPara YUV_W_PARA = {
1024,

View File

@@ -49,7 +49,7 @@ class BasicTokenizerOp : public TensorOp {
protected:
Status CaseFoldWithoutUnusedWords(const std::string_view &text, const std::unordered_set<std::string> &unused_words,
std::string *outupt);
std::string *output);
Status CaseFoldWithoutUnusedWords(const std::shared_ptr<Tensor> &input, std::shared_ptr<Tensor> *output);
std::string Name() const override { return kBasicTokenizerOp; }

View File

@@ -25,7 +25,7 @@ struct bitmask {
uint64_t *maskp;
};
// Now we seperate the link from _c_dataengine with numa,
// Now we separate the link from _c_dataengine with numa,
// and we use dlopen("libnuma") instead. This function will
// return a handle which you can do NumaBind and ReleaseLibrary.
void *GetNumaAdapterHandle();
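The comment above describes loading libnuma at run time instead of linking against it directly. A minimal sketch of that dlopen pattern, with illustrative names rather than the actual adapter code:

#include <dlfcn.h>

// Returns a handle to libnuma, or nullptr when the library is absent,
// letting the caller degrade gracefully instead of failing at load time.
void *OpenNumaLibrary() {                 // hypothetical helper
  return dlopen("libnuma.so", RTLD_LAZY);
}

// Symbols are then resolved on demand and the handle released when done:
//   void *fn = dlsym(handle, "numa_run_on_node");
//   dlclose(handle);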

View File

@@ -105,7 +105,7 @@ class __attribute__((visibility("default"))) ShardColumn {
std::unique_ptr<unsigned char[]> *data_ptr, uint64_t *n_bytes);
private:
/// \brief intialization
/// \brief initialization
void Init(const json &schema_json, bool compress_integer = true);
/// \brief get float value from json

View File

@@ -142,7 +142,7 @@ class __attribute__((visibility("default"))) ShardHeader {
/// \brief get the headers from all the shard data
/// \param[in] the shard data real path
/// \param[in] the headers which readed from the shard data
/// \param[in] the headers which read from the shard data
/// \return SUCCESS/FAILED
MSRStatus GetHeaders(const vector<string> &real_addresses, std::vector<json> &headers);

View File

@@ -163,7 +163,7 @@ MSRStatus ShardReader::Init(const std::vector<std::string> &file_paths, bool loa
auto disk_size = page_size_ * row_group_summary.size();
auto compression_size = shard_header_->GetCompressionSize();
total_blob_size_ = disk_size + compression_size;
MS_LOG(INFO) << "Blob data size, on disk: " << disk_size << " , addtional uncompression: " << compression_size
MS_LOG(INFO) << "Blob data size, on disk: " << disk_size << " , additional uncompression: " << compression_size
<< " , Total: " << total_blob_size_;
MS_LOG(INFO) << "Get meta from mindrecord file & index file successfully.";

View File

@@ -326,9 +326,9 @@ std::vector<uint8_t> ShardColumn::CompressBlob(const std::vector<uint8_t> &blob,
auto dst_blob_slice = CompressInt(blob_slice, int_type);
// Get new column size
auto new_blob_size = UIntToBytesBig(dst_blob_slice.size(), kInt64Type);
// Append new colmn size
// Append new column size
dst_blob.insert(dst_blob.end(), new_blob_size.begin(), new_blob_size.end());
// Append new colmn data
// Append new column data
dst_blob.insert(dst_blob.end(), dst_blob_slice.begin(), dst_blob_slice.end());
i_src += kInt64Len + num_bytes;
}
@@ -344,7 +344,7 @@ vector<uint8_t> ShardColumn::CompressInt(const vector<uint8_t> &src_bytes, const
// Calculate bitmap size (bytes)
uint64_t bitmap_size = (src_n_int + kNumDataOfByte - 1) / kNumDataOfByte;
// Initilize destination blob, more space than needed, will be resized
// Initialize destination blob, more space than needed, will be resized
vector<uint8_t> dst_bytes(kBytesOfColumnLen + bitmap_size + src_bytes.size(), 0);
// Write number of elements to destination blob
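The bitmap size above is a standard ceiling division: one flag per integer, eight flags per byte, rounded up. A small self-checking sketch with hypothetical names:

#include <cstdint>

constexpr uint64_t kFlagsPerByte = 8;  // hypothetical stand-in for kNumDataOfByte

// Bytes needed to store one flag per element, rounding up.
constexpr uint64_t BitmapBytes(uint64_t n_elements) {
  return (n_elements + kFlagsPerByte - 1) / kFlagsPerByte;
}

static_assert(BitmapBytes(13) == 2, "13 flags need 2 bytes");
static_assert(BitmapBytes(16) == 2, "16 flags fit exactly in 2 bytes");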

View File

@@ -55,7 +55,7 @@ class BBoxOpCommon : public CVOpCommon {
/// \brief Load BBox data from an XML file into a Tensor
/// \param[in] path path to XML bbox data file
/// \param[inout] target_BBox pointer to a Tensor to load
/// \param[in, out] target_BBox pointer to a Tensor to load
/// \return True if file loaded successfully, false if error -> logged to STD out
bool LoadAnnotationFile(const std::string &path, std::shared_ptr<Tensor> *target_BBox);

View File

@@ -104,7 +104,7 @@ TEST_F(TestShardWriter, TestShardWriterShiftRawPage) {
LoadData(input_path1, json_buffer1, kMaxNum);
MS_LOG(INFO) << "Load Meta Data Already.";
// get files' pathes stored in vector<string> image_filenames
// get files' paths stored in vector<string> image_filenames
mindrecord::GetAbsoluteFiles(path_dir, image_filenames); // get all files whose path within path_dir
MS_LOG(INFO) << "Only process 10 file names:";
image_filenames.resize(kMaxNum);
@@ -236,7 +236,7 @@ TEST_F(TestShardWriter, TestShardWriterTrial) {
LoadData(input_path1, json_buffer1, kMaxNum);
MS_LOG(INFO) << "Load Meta Data Already.";
// get files' pathes stored in vector<string> image_filenames
// get files' paths stored in vector<string> image_filenames
mindrecord::GetAbsoluteFiles(path_dir, image_filenames); // get all files whose path within path_dir
MS_LOG(INFO) << "Only process 10 file names:";
image_filenames.resize(kMaxNum);
@@ -375,7 +375,7 @@ TEST_F(TestShardWriter, TestShardWriterTrialNoFields) {
LoadData(input_path1, json_buffer1, kMaxNum);
MS_LOG(INFO) << "Load Meta Data Already.";
// get files' pathes stored in vector<string> image_filenames
// get files' paths stored in vector<string> image_filenames
mindrecord::GetAbsoluteFiles(path_dir, image_filenames); // get all files whose path within path_dir
MS_LOG(INFO) << "Only process 10 file names:";
image_filenames.resize(kMaxNum);
@@ -509,7 +509,7 @@ TEST_F(TestShardWriter, DataCheck) {
LoadData(input_path1, json_buffer1, kMaxNum);
MS_LOG(INFO) << "Load Meta Data Already.";
// get files' pathes stored in vector<string> image_filenames
// get files' paths stored in vector<string> image_filenames
mindrecord::GetAbsoluteFiles(path_dir, image_filenames); // get all files whose path within path_dir
MS_LOG(INFO) << "Only process 10 file names:";
image_filenames.resize(kMaxNum);
@@ -610,7 +610,7 @@ TEST_F(TestShardWriter, AllRawDataWrong) {
LoadData(input_path1, json_buffer1, kMaxNum);
MS_LOG(INFO) << "Load Meta Data Already.";
// get files' pathes stored in vector<string> image_filenames
// get files' paths stored in vector<string> image_filenames
mindrecord::GetAbsoluteFiles(path_dir, image_filenames); // get all files whose path within path_dir
MS_LOG(INFO) << "Only process 10 file names:";
image_filenames.resize(kMaxNum);

View File

@@ -34,7 +34,7 @@ def generator_22to24():
def test_simple_repeat():
# Since numer of epoch is 1, the GeneratorPass logic will not add the reset logic.
# Since number of epoch is 1, the GeneratorPass logic will not add the reset logic.
logger.info("test_simple_repeat")
# apply dataset operations
data1 = ds.GeneratorDataset(generator_1to2, ["data"])
@@ -59,7 +59,7 @@ def test_generator_reset_1():
branch1 = data1.repeat(4)
data2 = ds.GeneratorDataset(generator_10to12, ["data"])
branch2 = data2.repeat(2)
branch2 = branch2.take(10) # Meaningless opearation, just want to insert an op in between
branch2 = branch2.take(10) # Meaningless operation, just want to insert an op in between
data3 = ds.GeneratorDataset(generator_22to24, ["data"])
branch3 = data3.repeat(3)
branch3 = branch3.skip(1) # Skip the first row
@@ -86,7 +86,7 @@ def test_generator_reset_2():
branch1 = data1.repeat(3)
data2 = ds.GeneratorDataset(generator_10to12, ["data"])
branch2 = data2.repeat(2)
branch2 = branch2.take(10) # Meaningless opearation, just want to insert an op in between
branch2 = branch2.take(10) # Meaningless operation, just want to insert an op in between
data3 = ds.GeneratorDataset(generator_22to24, ["data"])
branch3 = data3.skip(2) # Skip the first row

View File

@@ -276,8 +276,8 @@ def test_pyfunc_implicit_compose():
i = i + 4
def test_pyfunc_execption():
logger.info("Test PyFunc Execption Throw: lambda x : raise Execption()")
def test_pyfunc_exception():
logger.info("Test PyFunc Exception Throw: lambda x : raise Exception()")
def pyfunc(x):
raise Exception("Pyfunc Throw")
@@ -292,8 +292,8 @@ def test_pyfunc_execption():
assert "Pyfunc Throw" in str(info.value)
def skip_test_pyfunc_execption_multiprocess():
logger.info("Test Multiprocess PyFunc Execption Throw: lambda x : raise Execption()")
def skip_test_pyfunc_exception_multiprocess():
logger.info("Test Multiprocess PyFunc Exception Throw: lambda x : raise Exception()")
def pyfunc(x):
raise Exception("MP Pyfunc Throw")
@@ -320,5 +320,5 @@ if __name__ == "__main__":
test_case_8()
test_case_9()
test_pyfunc_implicit_compose()
test_pyfunc_execption()
skip_test_pyfunc_execption_multiprocess()
test_pyfunc_exception()
skip_test_pyfunc_exception_multiprocess()

View File

@@ -154,7 +154,7 @@ def test_random_rotation_md5():
original_seed = config_get_set_seed(5)
original_num_parallel_workers = config_get_set_num_parallel_workers(1)
# Fisrt dataset
# First dataset
data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
decode_op = c_vision.Decode()
resize_op = c_vision.RandomRotation((0, 90),

View File

@@ -305,7 +305,7 @@ def test_take_16():
def test_take_17():
"""
Test take: take first, then do fiter operation
Test take: take first, then do filter operation
"""
logger.info("test_take_17")
data1 = ds.GeneratorDataset(generator_10, ["data"])
@@ -322,7 +322,7 @@ def test_take_17():
def test_take_18():
"""
Test take: take first, then do fiter, skip, batch and repeat operation
Test take: take first, then do filter, skip, batch and repeat operation
"""
logger.info("test_take_18")
data1 = ds.GeneratorDataset(generator_10, ["data"])

View File

@@ -547,7 +547,7 @@ def skip_test_issue_155():
count = 0
for _ in reader.get_next():
count += 1
assert count == 10000, "Failed to read mutiple writed data."
assert count == 10000, "Failed to read multiple writed data."
def test_issue_124():

View File

@@ -46,7 +46,7 @@ def test_cifar100_to_mindrecord_without_index_fields(fixture_file):
"""test transform cifar100 dataset to mindrecord without index fields."""
cifar100_transformer = Cifar100ToMR(CIFAR100_DIR, MINDRECORD_FILE)
ret = cifar100_transformer.transform()
assert ret == SUCCESS, "Failed to tranform from cifar100 to mindrecord"
assert ret == SUCCESS, "Failed to transform from cifar100 to mindrecord"
assert os.path.exists(MINDRECORD_FILE)
assert os.path.exists(MINDRECORD_FILE + "_test")
read()

View File

@@ -591,27 +591,27 @@ def test_write_with_invalid_data():
writer.write_raw_data(data)
writer.commit()
# field: label => lable
# field: label => labels
with pytest.raises(Exception, match="Failed to write dataset"):
remove_one_file(mindrecord_file_name)
remove_one_file(mindrecord_file_name + ".db")
data = [{"file_name": "001.jpg", "lable": 43, "score": 0.8, "mask": np.array([3, 6, 9], dtype=np.int64),
data = [{"file_name": "001.jpg", "labels": 43, "score": 0.8, "mask": np.array([3, 6, 9], dtype=np.int64),
"segments": np.array([[5.0, 1.6], [65.2, 8.3]], dtype=np.float32),
"data": bytes("image bytes abc", encoding='UTF-8')},
{"file_name": "002.jpg", "lable": 91, "score": 5.4, "mask": np.array([1, 4, 7], dtype=np.int64),
{"file_name": "002.jpg", "labels": 91, "score": 5.4, "mask": np.array([1, 4, 7], dtype=np.int64),
"segments": np.array([[5.1, 9.1], [2.0, 65.4]], dtype=np.float32),
"data": bytes("image bytes def", encoding='UTF-8')},
{"file_name": "003.jpg", "lable": 61, "score": 6.4, "mask": np.array([7, 6, 3], dtype=np.int64),
{"file_name": "003.jpg", "labels": 61, "score": 6.4, "mask": np.array([7, 6, 3], dtype=np.int64),
"segments": np.array([[0.0, 5.6], [3.0, 16.3]], dtype=np.float32),
"data": bytes("image bytes ghi", encoding='UTF-8')},
{"file_name": "004.jpg", "lable": 29, "score": 8.1, "mask": np.array([2, 8, 0], dtype=np.int64),
{"file_name": "004.jpg", "labels": 29, "score": 8.1, "mask": np.array([2, 8, 0], dtype=np.int64),
"segments": np.array([[5.9, 7.2], [4.0, 89.0]], dtype=np.float32),
"data": bytes("image bytes jkl", encoding='UTF-8')},
{"file_name": "005.jpg", "lable": 78, "score": 7.7, "mask": np.array([3, 1, 2], dtype=np.int64),
{"file_name": "005.jpg", "labels": 78, "score": 7.7, "mask": np.array([3, 1, 2], dtype=np.int64),
"segments": np.array([[0.6, 8.1], [5.3, 49.3]], dtype=np.float32),
"data": bytes("image bytes mno", encoding='UTF-8')},
{"file_name": "006.jpg", "lable": 37, "score": 9.4, "mask": np.array([7, 6, 7], dtype=np.int64),
{"file_name": "006.jpg", "labels": 37, "score": 9.4, "mask": np.array([7, 6, 7], dtype=np.int64),
"segments": np.array([[4.2, 6.3], [8.9, 81.8]], dtype=np.float32),
"data": bytes("image bytes pqr", encoding='UTF-8')}
]