forked from mindspore-Ecosystem/mindspore
fix cache description again
This commit is contained in:
parent b6529b9096
commit 1906ed1be8
@@ -2144,6 +2144,7 @@ class RepeatDataset(DatasetOp):
        """
        return self.count


class SkipDataset(DatasetOp):
    """
    The result of applying Skip operator to the input Dataset.
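For context, RepeatDataset and SkipDataset are the pipeline nodes created by the user-facing repeat() and skip() calls. A minimal, illustrative sketch of that surface, assuming the same ImageFolderDatasetV2 leaf and DATA_DIR path used by the tests further down:

import mindspore.dataset as ds

DATA_DIR = "../data/dataset/testImageNetData/train/"

data = ds.ImageFolderDatasetV2(dataset_dir=DATA_DIR)
data = data.repeat(4)   # wraps the pipeline in a RepeatDataset node
data = data.skip(2)     # wraps it in a SkipDataset node, dropping the first 2 rows

for _ in data.create_dict_iterator():
    pass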
@@ -2409,6 +2410,7 @@ class TransferDataset(DatasetOp):
    def stop_send(self):
        self.iterator.depipeline.StopSend()


class RangeDataset(MappableDataset):
    """
    A source dataset that reads and parses datasets stored on disk in a range.
@@ -5319,6 +5321,7 @@ class BuildVocabDataset(DatasetOp):

        return new_op


class BuildSentencePieceVocabDataset(DatasetOp):
    """
    Build a SentencePieceVocab from a dataset.
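BuildSentencePieceVocabDataset is the internal node behind SentencePiece vocabulary building. A rough sketch of the user-facing path, assuming the text.SentencePieceVocab.from_dataset helper, a hypothetical one-sentence-per-line corpus.txt, and illustrative parameter values; names and signatures should be checked against the installed MindSpore release:

import mindspore.dataset as ds
import mindspore.dataset.text as text
from mindspore.dataset.text import SentencePieceModel

# Assumption: corpus.txt holds plain text, one sentence per line.
corpus = ds.TextFileDataset("corpus.txt", shuffle=False)

# Drives a vocabulary-building pass over the pipeline and returns the trained vocab.
vocab = text.SentencePieceVocab.from_dataset(
    corpus, ["text"], vocab_size=5000, character_coverage=0.9995,
    model_type=SentencePieceModel.UNIGRAM, params={})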
@@ -24,6 +24,7 @@ DATA_DIR = "../data/dataset/testImageNetData/train/"

GENERATE_GOLDEN = False


def test_cache_map_basic1():
    """
    Test mappable leaf with cache op right over the leaf
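The pattern named in that docstring, a cache op right over the leaf, attaches the DatasetCache to the mappable source itself so later map operations read from the cached rows. A minimal sketch reusing the aliases and constants this test file already imports (ds, c_vision, DATA_DIR); the session_id, size, and spilling values simply mirror the tests:

some_cache = ds.DatasetCache(session_id=1, size=0, spilling=True)

# The cache sits directly over the mappable leaf ...
data = ds.ImageFolderDatasetV2(dataset_dir=DATA_DIR, cache=some_cache)
# ... and decode runs on top of the cached rows.
data = data.map(input_columns=["image"], operations=c_vision.Decode())

num_rows = 0
for _ in data.create_dict_iterator():
    num_rows += 1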
@@ -117,28 +118,28 @@ def test_cache_map_basic3():


def test_cache_map_basic4():
    """
    Test different rows result in core dump
    """
    logger.info("Test cache basic 4")
    some_cache = ds.DatasetCache(session_id=1, size=0, spilling=True)

    # This DATA_DIR only has 2 images in it
    ds1 = ds.ImageFolderDatasetV2(dataset_dir=DATA_DIR, cache=some_cache)
    decode_op = c_vision.Decode()
    ds1 = ds1.repeat(4)
    ds1 = ds1.map(input_columns=["image"], operations=decode_op)
    logger.info("ds1.dataset_size is ", ds1.get_dataset_size())
    shape = ds1.output_shapes()
    logger.info(shape)
    num_iter = 0
    for _ in ds1.create_dict_iterator():
        logger.info("get data from dataset")
        num_iter += 1

    logger.info("Number of data in ds1: {} ".format(num_iter))
    assert num_iter == 8
    logger.info('test_cache_basic3 Ended.\n')


def test_cache_map_failure1():
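The assertion at the end of test_cache_map_basic4 follows from the comment in its body: the directory behind DATA_DIR holds 2 images and the leaf is repeated 4 times, so the iterator should yield 8 rows. A trivial sketch of that arithmetic, taking the image count from the test's own comment:

num_images = 2               # per the "only has 2 images" comment in the test
repeat_count = 4             # ds1.repeat(4)
expected_rows = num_images * repeat_count
assert expected_rows == 8    # matches `assert num_iter == 8`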
@@ -177,6 +178,7 @@ def test_cache_map_failure1():
    assert num_iter == 0
    logger.info('test_cache_failure1 Ended.\n')


if __name__ == '__main__':
    test_cache_map_basic1()
    logger.info("test_cache_map_basic1 success.")