Use np.testing.assert_array_equal instead of plain assert statements

fix line lengths

fix spacing

reverse a few changes
This commit is contained in:
tony_liu2 2020-07-27 14:27:11 -04:00
parent ee67f70b73
commit 269b477684
14 changed files with 189 additions and 187 deletions

View File

@ -31,13 +31,13 @@ def test_basic():
arr[0] = 0
x = np.array([0, 2, 3, 4, 5])
assert np.array_equal(x, arr)
np.testing.assert_array_equal(x, arr)
assert n.type() == cde.DataType("int64")
arr2 = n.as_array()
arr[0] = 2
x = np.array([2, 2, 3, 4, 5])
assert np.array_equal(x, arr2)
np.testing.assert_array_equal(x, arr2)
assert n.type() == cde.DataType("int64")
assert arr.__array_interface__['data'] == arr2.__array_interface__['data']
@ -47,12 +47,12 @@ def test_strides():
n1 = cde.Tensor(x[:, 1])
arr = np.array(n1, copy=False)
assert np.array_equal(x[:, 1], arr)
np.testing.assert_array_equal(x[:, 1], arr)
n2 = cde.Tensor(x.transpose())
arr = np.array(n2, copy=False)
assert np.array_equal(x.transpose(), arr)
np.testing.assert_array_equal(x.transpose(), arr)
if __name__ == '__main__':

View File

@ -41,7 +41,7 @@ def test_apply_generator_case():
data2 = data2.batch(4)
for item1, item2 in zip(data1.create_dict_iterator(), data2.create_dict_iterator()):
assert np.array_equal(item1["data"], item2["data"])
np.testing.assert_array_equal(item1["data"], item2["data"])
def test_apply_imagefolder_case():
@ -64,7 +64,7 @@ def test_apply_imagefolder_case():
data2 = data2.repeat(2)
for item1, item2 in zip(data1.create_dict_iterator(), data2.create_dict_iterator()):
assert np.array_equal(item1["image"], item2["image"])
np.testing.assert_array_equal(item1["image"], item2["image"])
def test_apply_flow_case_0(id_=0):

View File

@ -44,18 +44,18 @@ def test_coco_detection():
assert image_shape[3] == (642, 675, 3)
assert image_shape[4] == (2268, 4032, 3)
assert image_shape[5] == (2268, 4032, 3)
assert np.array_equal(np.array([[10., 10., 10., 10.], [70., 70., 70., 70.]]), bbox[0])
assert np.array_equal(np.array([[20., 20., 20., 20.], [80., 80., 80.0, 80.]]), bbox[1])
assert np.array_equal(np.array([[30.0, 30.0, 30.0, 30.]]), bbox[2])
assert np.array_equal(np.array([[40., 40., 40., 40.]]), bbox[3])
assert np.array_equal(np.array([[50., 50., 50., 50.]]), bbox[4])
assert np.array_equal(np.array([[60., 60., 60., 60.]]), bbox[5])
assert np.array_equal(np.array([[1], [7]]), category_id[0])
assert np.array_equal(np.array([[2], [8]]), category_id[1])
assert np.array_equal(np.array([[3]]), category_id[2])
assert np.array_equal(np.array([[4]]), category_id[3])
assert np.array_equal(np.array([[5]]), category_id[4])
assert np.array_equal(np.array([[6]]), category_id[5])
np.testing.assert_array_equal(np.array([[10., 10., 10., 10.], [70., 70., 70., 70.]]), bbox[0])
np.testing.assert_array_equal(np.array([[20., 20., 20., 20.], [80., 80., 80.0, 80.]]), bbox[1])
np.testing.assert_array_equal(np.array([[30.0, 30.0, 30.0, 30.]]), bbox[2])
np.testing.assert_array_equal(np.array([[40., 40., 40., 40.]]), bbox[3])
np.testing.assert_array_equal(np.array([[50., 50., 50., 50.]]), bbox[4])
np.testing.assert_array_equal(np.array([[60., 60., 60., 60.]]), bbox[5])
np.testing.assert_array_equal(np.array([[1], [7]]), category_id[0])
np.testing.assert_array_equal(np.array([[2], [8]]), category_id[1])
np.testing.assert_array_equal(np.array([[3]]), category_id[2])
np.testing.assert_array_equal(np.array([[4]]), category_id[3])
np.testing.assert_array_equal(np.array([[5]]), category_id[4])
np.testing.assert_array_equal(np.array([[6]]), category_id[5])
def test_coco_stuff():
data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff",
@ -76,25 +76,26 @@ def test_coco_stuff():
assert image_shape[3] == (642, 675, 3)
assert image_shape[4] == (2268, 4032, 3)
assert image_shape[5] == (2268, 4032, 3)
assert np.array_equal(np.array([[10., 12., 13., 14., 15., 16., 17., 18., 19., 20.],
[70., 72., 73., 74., 75., -1., -1., -1., -1., -1.]]),
segmentation[0])
assert np.array_equal(np.array([[0], [0]]), iscrowd[0])
assert np.array_equal(np.array([[20.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0],
[10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, -1.0]]),
segmentation[1])
assert np.array_equal(np.array([[0], [1]]), iscrowd[1])
assert np.array_equal(np.array([[40., 42., 43., 44., 45., 46., 47., 48., 49., 40., 41., 42.]]), segmentation[2])
assert np.array_equal(np.array([[0]]), iscrowd[2])
assert np.array_equal(np.array([[50., 52., 53., 54., 55., 56., 57., 58., 59., 60., 61., 62., 63.]]),
segmentation[3])
assert np.array_equal(np.array([[0]]), iscrowd[3])
assert np.array_equal(np.array([[60., 62., 63., 64., 65., 66., 67., 68., 69., 70., 71., 72., 73., 74.]]),
segmentation[4])
assert np.array_equal(np.array([[0]]), iscrowd[4])
assert np.array_equal(np.array([[60., 62., 63., 64., 65., 66., 67.], [68., 69., 70., 71., 72., 73., 74.]]),
segmentation[5])
assert np.array_equal(np.array([[0]]), iscrowd[5])
np.testing.assert_array_equal(np.array([[10., 12., 13., 14., 15., 16., 17., 18., 19., 20.],
[70., 72., 73., 74., 75., -1., -1., -1., -1., -1.]]),
segmentation[0])
np.testing.assert_array_equal(np.array([[0], [0]]), iscrowd[0])
np.testing.assert_array_equal(np.array([[20.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0],
[10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, -1.0]]),
segmentation[1])
np.testing.assert_array_equal(np.array([[0], [1]]), iscrowd[1])
np.testing.assert_array_equal(np.array([[40., 42., 43., 44., 45., 46., 47., 48., 49., 40., 41., 42.]]),
segmentation[2])
np.testing.assert_array_equal(np.array([[0]]), iscrowd[2])
np.testing.assert_array_equal(np.array([[50., 52., 53., 54., 55., 56., 57., 58., 59., 60., 61., 62., 63.]]),
segmentation[3])
np.testing.assert_array_equal(np.array([[0]]), iscrowd[3])
np.testing.assert_array_equal(np.array([[60., 62., 63., 64., 65., 66., 67., 68., 69., 70., 71., 72., 73., 74.]]),
segmentation[4])
np.testing.assert_array_equal(np.array([[0]]), iscrowd[4])
np.testing.assert_array_equal(np.array([[60., 62., 63., 64., 65., 66., 67.], [68., 69., 70., 71., 72., 73., 74.]]),
segmentation[5])
np.testing.assert_array_equal(np.array([[0]]), iscrowd[5])
def test_coco_keypoint():
data1 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint",
@ -111,16 +112,17 @@ def test_coco_keypoint():
assert num_iter == 2
assert image_shape[0] == (2268, 4032, 3)
assert image_shape[1] == (561, 595, 3)
assert np.array_equal(np.array([[368., 61., 1., 369., 52., 2., 0., 0., 0., 382., 48., 2., 0., 0., 0., 368., 84., 2.,
435., 81., 2., 362., 125., 2., 446., 125., 2., 360., 153., 2., 0., 0., 0., 397.,
167., 1., 439., 166., 1., 369., 193., 2., 461., 234., 2., 361., 246., 2., 474.,
287., 2.]]), keypoints[0])
assert np.array_equal(np.array([[14]]), num_keypoints[0])
assert np.array_equal(np.array([[244., 139., 2., 0., 0., 0., 226., 118., 2., 0., 0., 0., 154., 159., 2., 143., 261.,
2., 135., 312., 2., 271., 423., 2., 184., 530., 2., 261., 280., 2., 347., 592., 2.,
0., 0., 0., 123., 596., 2., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]]),
keypoints[1])
assert np.array_equal(np.array([[10]]), num_keypoints[1])
np.testing.assert_array_equal(np.array([[368., 61., 1., 369., 52., 2., 0., 0., 0., 382., 48., 2., 0., 0., 0., 368.,
84., 2., 435., 81., 2., 362., 125., 2., 446., 125., 2., 360., 153., 2., 0.,
0., 0., 397., 167., 1., 439., 166., 1., 369., 193., 2., 461., 234., 2.,
361., 246., 2., 474., 287., 2.]]), keypoints[0])
np.testing.assert_array_equal(np.array([[14]]), num_keypoints[0])
np.testing.assert_array_equal(np.array([[244., 139., 2., 0., 0., 0., 226., 118., 2., 0., 0., 0., 154., 159., 2.,
143., 261., 2., 135., 312., 2., 271., 423., 2., 184., 530., 2., 261., 280.,
2., 347., 592., 2., 0., 0., 0., 123., 596., 2., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0.]]),
keypoints[1])
np.testing.assert_array_equal(np.array([[10]]), num_keypoints[1])
def test_coco_panoptic():
data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic", decode=True, shuffle=False)
@ -139,15 +141,15 @@ def test_coco_panoptic():
num_iter += 1
assert num_iter == 2
assert image_shape[0] == (2268, 4032, 3)
assert np.array_equal(np.array([[472, 173, 36, 48], [340, 22, 154, 301], [486, 183, 30, 35]]), bbox[0])
assert np.array_equal(np.array([[1], [1], [2]]), category_id[0])
assert np.array_equal(np.array([[0], [0], [0]]), iscrowd[0])
assert np.array_equal(np.array([[705], [14062], [626]]), area[0])
np.testing.assert_array_equal(np.array([[472, 173, 36, 48], [340, 22, 154, 301], [486, 183, 30, 35]]), bbox[0])
np.testing.assert_array_equal(np.array([[1], [1], [2]]), category_id[0])
np.testing.assert_array_equal(np.array([[0], [0], [0]]), iscrowd[0])
np.testing.assert_array_equal(np.array([[705], [14062], [626]]), area[0])
assert image_shape[1] == (642, 675, 3)
assert np.array_equal(np.array([[103, 133, 229, 422], [243, 175, 93, 164]]), bbox[1])
assert np.array_equal(np.array([[1], [3]]), category_id[1])
assert np.array_equal(np.array([[0], [0]]), iscrowd[1])
assert np.array_equal(np.array([[43102], [6079]]), area[1])
np.testing.assert_array_equal(np.array([[103, 133, 229, 422], [243, 175, 93, 164]]), bbox[1])
np.testing.assert_array_equal(np.array([[1], [3]]), category_id[1])
np.testing.assert_array_equal(np.array([[0], [0]]), iscrowd[1])
np.testing.assert_array_equal(np.array([[43102], [6079]]), area[1])
def test_coco_detection_classindex():
data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)

View File

@ -38,7 +38,7 @@ def test_generator_0():
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
@ -60,7 +60,7 @@ def test_generator_1():
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
@ -82,9 +82,9 @@ def test_generator_2():
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["col0"], golden)
np.testing.assert_array_equal(item["col0"], golden)
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item["col1"], golden)
np.testing.assert_array_equal(item["col1"], golden)
i = i + 1
@ -102,7 +102,7 @@ def test_generator_3():
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
if i == 64:
i = 0
@ -122,7 +122,7 @@ def test_generator_4():
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([[i], [i + 1], [i + 2], [i + 3]])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 4
@ -142,7 +142,7 @@ def type_tester(t):
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([[i], [i + 1], [i + 2], [i + 3]], dtype=t)
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 4
@ -169,7 +169,7 @@ def type_tester_with_type_check(t, c):
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([[i], [i + 1], [i + 2], [i + 3]], dtype=t)
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 4
@ -204,7 +204,7 @@ def type_tester_with_type_check_2c(t, c):
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([[i], [i + 1], [i + 2], [i + 3]], dtype=t)
assert np.array_equal(item["data0"], golden)
np.testing.assert_array_equal(item["data0"], golden)
i = i + 4
@ -241,11 +241,11 @@ def test_generator_8():
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i * 3])
assert np.array_equal(item["out0"], golden)
np.testing.assert_array_equal(item["out0"], golden)
golden = np.array([[i * 7, (i + 1) * 7], [(i + 2) * 7, (i + 3) * 7]])
assert np.array_equal(item["out1"], golden)
np.testing.assert_array_equal(item["out1"], golden)
golden = np.array([[i + 1, i + 2], [i + 3, i + 4]])
assert np.array_equal(item["out2"], golden)
np.testing.assert_array_equal(item["out2"], golden)
i = i + 1
@ -269,14 +269,14 @@ def test_generator_9():
i = 0
for data1, data2 in zip(data1, data2): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(data1[0], golden)
np.testing.assert_array_equal(data1[0], golden)
golden = np.array([[i * 3, (i + 1) * 3], [(i + 2) * 3, (i + 3) * 3]])
assert np.array_equal(data1[1], golden)
np.testing.assert_array_equal(data1[1], golden)
golden = np.array([i * 3])
assert np.array_equal(data2[0], golden)
np.testing.assert_array_equal(data2[0], golden)
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(data2[1], golden)
np.testing.assert_array_equal(data2[1], golden)
i = i + 1
@ -295,11 +295,11 @@ def test_generator_10():
i = 0
for item in data1.create_tuple_iterator():
golden = np.array([i])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item[1], golden)
np.testing.assert_array_equal(item[1], golden)
golden = np.array([[i * 5, (i + 1) * 5], [(i + 2) * 5, (i + 3) * 5]])
assert np.array_equal(item[2], golden)
np.testing.assert_array_equal(item[2], golden)
i = i + 1
@ -321,9 +321,9 @@ def test_generator_11():
# len should be 2 because col0 is dropped (not included in columns_order)
assert len(item) == 2
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
golden = np.array([[i * 5, (i + 1) * 5], [(i + 2) * 5, (i + 3) * 5]])
assert np.array_equal(item[1], golden)
np.testing.assert_array_equal(item[1], golden)
i = i + 1
@ -342,9 +342,9 @@ def test_generator_12():
for item in data1.create_tuple_iterator():
assert len(item) == 2
golden = np.array([i * 5])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item[1], golden)
np.testing.assert_array_equal(item[1], golden)
i = i + 1
data1 = ds.GeneratorDataset(generator_mc(2048), ["col0", "col1"])
@ -355,9 +355,9 @@ def test_generator_12():
for item in data1.create_tuple_iterator():
assert len(item) == 2
golden = np.array([i * 5])
assert np.array_equal(item[1], golden)
np.testing.assert_array_equal(item[1], golden)
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
@ -376,18 +376,18 @@ def test_generator_13():
for item in data1.create_tuple_iterator():
assert len(item) == 2
golden = np.array([i * 5])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item[1], golden)
np.testing.assert_array_equal(item[1], golden)
i = i + 1
for item in data1.create_dict_iterator(): # each data is a dictionary
# len should be 2 because col0 is dropped (not included in columns_order)
assert len(item) == 2
golden = np.array([i * 5])
assert np.array_equal(item["out0"], golden)
np.testing.assert_array_equal(item["out0"], golden)
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item["col1"], golden)
np.testing.assert_array_equal(item["col1"], golden)
i = i + 1
@ -402,7 +402,7 @@ def test_generator_14():
i = 0
for data in ds1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(data["data"], golden)
np.testing.assert_array_equal(data["data"], golden)
i = i + 1
if i == 256:
i = 0
@ -420,7 +420,7 @@ def test_generator_15():
i = 0
for data in ds1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(data["data"], golden)
np.testing.assert_array_equal(data["data"], golden)
i = i + 1
if i == 256:
i = 0
@ -439,9 +439,9 @@ def test_generator_16():
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["col0"], golden)
np.testing.assert_array_equal(item["col0"], golden)
golden = np.array([i + 1])
assert np.array_equal(item["col1"], golden)
np.testing.assert_array_equal(item["col1"], golden)
i = i + 1
@ -459,9 +459,9 @@ def test_generator_17():
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["col0"], golden)
np.testing.assert_array_equal(item["col0"], golden)
golden = np.array([i + 1])
assert np.array_equal(item["col1"], golden)
np.testing.assert_array_equal(item["col1"], golden)
i = i + 1
@ -519,7 +519,7 @@ def test_generator_sequential_sampler():
i = 0
for data in ds1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(data["data"], golden)
np.testing.assert_array_equal(data["data"], golden)
i = i + 1
@ -537,7 +537,7 @@ def test_generator_distributed_sampler():
i = sid
for data in ds1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(data["data"], golden)
np.testing.assert_array_equal(data["data"], golden)
i = i + 8
@ -596,7 +596,7 @@ def type_tester_with_type_check_2c_schema(t, c):
i = 0
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([[i], [i + 1], [i + 2], [i + 3]], dtype=t)
assert np.array_equal(item["data0"], golden)
np.testing.assert_array_equal(item["data0"], golden)
i = i + 4

View File

@ -162,7 +162,7 @@ def test_tfrecord_schema():
for d1, d2 in zip(data1, data2):
for t1, t2 in zip(d1, d2):
assert np.array_equal(t1, t2)
np.testing.assert_array_equal(t1, t2)
def test_tfrecord_shuffle():
@ -174,7 +174,7 @@ def test_tfrecord_shuffle():
for d1, d2 in zip(data1, data2):
for t1, t2 in zip(d1, d2):
assert np.array_equal(t1, t2)
np.testing.assert_array_equal(t1, t2)
def test_tfrecord_shard():

View File

@ -136,7 +136,7 @@ def test_generator_dict_0():
# create the iterator inside the loop declaration
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
def test_generator_dict_1():
@ -154,7 +154,7 @@ def test_generator_dict_1():
# Create iterator outside the epoch for loop.
for item in data1.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
assert i == 64
@ -171,7 +171,7 @@ def test_generator_dict_2():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
assert i == 64
@ -193,7 +193,7 @@ def test_generator_dict_3():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
assert i == 64
# optional
@ -217,7 +217,7 @@ def test_generator_dict_4():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
assert i == 64
@ -240,7 +240,7 @@ def test_generator_dict_4_1():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
assert i == 64
@ -265,7 +265,7 @@ def test_generator_dict_4_2():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
assert i == 64
@ -287,7 +287,7 @@ def test_generator_dict_5():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
assert i == 64
@ -295,7 +295,7 @@ def test_generator_dict_5():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
assert i == 64
@ -320,7 +320,7 @@ def test_generator_tuple_0():
# create the iterator inside the loop declaration
for item in data1.create_tuple_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
def test_generator_tuple_1():
@ -338,7 +338,7 @@ def test_generator_tuple_1():
# Create iterator outside the epoch for loop.
for item in data1.create_tuple_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64
@ -355,7 +355,7 @@ def test_generator_tuple_2():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64
@ -377,7 +377,7 @@ def test_generator_tuple_3():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64
# optional
@ -401,7 +401,7 @@ def test_generator_tuple_4():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64
@ -424,7 +424,7 @@ def test_generator_tuple_5():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64
@ -432,7 +432,7 @@ def test_generator_tuple_5():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64
@ -457,7 +457,7 @@ def test_generator_tuple_repeat_1():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i % 64])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64 * 2
@ -465,7 +465,7 @@ def test_generator_tuple_repeat_1():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i % 64])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64 * 2
@ -492,7 +492,7 @@ def test_generator_tuple_repeat_repeat_1():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i % 64])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64 * 2 * 3
@ -500,7 +500,7 @@ def test_generator_tuple_repeat_repeat_1():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i % 64])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64 * 2 * 3
@ -526,7 +526,7 @@ def test_generator_tuple_repeat_repeat_2():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i % 64])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64 * 2 * 3
# optional
@ -551,7 +551,7 @@ def test_generator_tuple_repeat_repeat_3():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i % 64])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64 * 2 * 3
@ -559,7 +559,7 @@ def test_generator_tuple_repeat_repeat_3():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i % 64])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64 * 2 * 3
@ -579,7 +579,7 @@ def test_generator_reusedataset():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i % 64])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64 * 2
@ -589,7 +589,7 @@ def test_generator_reusedataset():
i = 0
for item in iter1: # each data is a dictionary
golden = np.array([i % 64])
assert np.array_equal(item[0], golden)
np.testing.assert_array_equal(item[0], golden)
i = i + 1
assert i == 64 * 2 * 3
@ -600,7 +600,7 @@ def test_generator_reusedataset():
sample = 0
for item in iter1: # each data is a dictionary
golden = np.array([[i % 64], [(i + 1) % 64]])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 2
sample = sample + 1
assert sample == 64 * 3

View File

@ -67,7 +67,7 @@ def test_shuffle():
for d1, d2 in zip(data1, data2):
for t1, t2 in zip(d1, d2):
assert np.array_equal(t1, t2)
np.testing.assert_array_equal(t1, t2)
ds.config.set_seed(1)
DATA_ALL_FILE = "../data/dataset/testTextFileDataset/*"
@ -77,7 +77,7 @@ def test_shuffle():
for d1, d2 in zip(data1, data2):
for t1, t2 in zip(d1, d2):
assert np.array_equal(t1, t2)
np.testing.assert_array_equal(t1, t2)
ds.config.set_seed(1)
TRAIN_FILE = '../data/dataset/testCLUE/afqmc/train.json'
@ -87,7 +87,7 @@ def test_shuffle():
for d1, d2 in zip(data1, data2):
for t1, t2 in zip(d1, d2):
assert np.array_equal(t1, t2)
np.testing.assert_array_equal(t1, t2)
if __name__ == "__main__":

View File

@ -63,8 +63,8 @@ def test_batch_padding_01():
data1 = data1.batch(batch_size=2, drop_remainder=False, pad_info={"col2d": ([2, 2], -2), "col1d": ([2], -1)})
data1 = data1.repeat(2)
for data in data1.create_dict_iterator():
assert np.array_equal([[0, -1], [1, -1]], data["col1d"])
assert np.array_equal([[[100, -2], [200, -2]], [[101, -2], [201, -2]]], data["col2d"])
np.testing.assert_array_equal([[0, -1], [1, -1]], data["col1d"])
np.testing.assert_array_equal([[[100, -2], [200, -2]], [[101, -2], [201, -2]]], data["col2d"])
def test_batch_padding_02():
@ -72,8 +72,8 @@ def test_batch_padding_02():
data1 = data1.batch(batch_size=2, drop_remainder=False, pad_info={"col2d": ([1, 2], -2)})
data1 = data1.repeat(2)
for data in data1.create_dict_iterator():
assert np.array_equal([[0], [1]], data["col1d"])
assert np.array_equal([[[100, -2]], [[101, -2]]], data["col2d"])
np.testing.assert_array_equal([[0], [1]], data["col1d"])
np.testing.assert_array_equal([[[100, -2]], [[101, -2]]], data["col2d"])
def test_batch_padding_03():
@ -83,10 +83,10 @@ def test_batch_padding_03():
res = dict()
for ind, data in enumerate(data1.create_dict_iterator()):
res[ind] = data["col"].copy()
assert np.array_equal(res[0], [[0, -1], [0, 1]])
assert np.array_equal(res[1], [[0, 1, 2, -1], [0, 1, 2, 3]])
assert np.array_equal(res[2], [[0, -1], [0, 1]])
assert np.array_equal(res[3], [[0, 1, 2, -1], [0, 1, 2, 3]])
np.testing.assert_array_equal(res[0], [[0, -1], [0, 1]])
np.testing.assert_array_equal(res[1], [[0, 1, 2, -1], [0, 1, 2, 3]])
np.testing.assert_array_equal(res[2], [[0, -1], [0, 1]])
np.testing.assert_array_equal(res[3], [[0, 1, 2, -1], [0, 1, 2, 3]])
def test_batch_padding_04():
@ -94,8 +94,8 @@ def test_batch_padding_04():
data1 = data1.batch(batch_size=2, drop_remainder=False, pad_info={}) # pad automatically
data1 = data1.repeat(2)
for data in data1.create_dict_iterator():
assert np.array_equal(data["col1"], [[0, 0], [0, 1]])
assert np.array_equal(data["col2"], [[100, 0], [100, 101]])
np.testing.assert_array_equal(data["col1"], [[0, 0], [0, 1]])
np.testing.assert_array_equal(data["col2"], [[100, 0], [100, 101]])
def test_batch_padding_05():
@ -103,9 +103,9 @@ def test_batch_padding_05():
data1 = data1.batch(batch_size=3, drop_remainder=False,
pad_info={"col2": ([2, None], -2), "col1": (None, -1)}) # pad automatically
for data in data1.create_dict_iterator():
assert np.array_equal(data["col1"], [[[0, -1, -1]], [[0, 1, -1]], [[0, 1, 2]]])
assert np.array_equal(data["col2"], [[[100, -2, -2], [-2, -2, -2]], [[100, 101, -2], [-2, -2, -2]],
[[100, 101, 102], [-2, -2, -2]]])
np.testing.assert_array_equal(data["col1"], [[[0, -1, -1]], [[0, 1, -1]], [[0, 1, 2]]])
np.testing.assert_array_equal(data["col2"], [[[100, -2, -2], [-2, -2, -2]], [[100, 101, -2], [-2, -2, -2]],
[[100, 101, 102], [-2, -2, -2]]])
def batch_padding_performance_3d():
@ -197,7 +197,7 @@ def test_pad_via_map():
res_from_batch = pad_batch_config()
assert len(res_from_batch) == len(res_from_batch)
for i, _ in enumerate(res_from_map):
assert np.array_equal(res_from_map[i], res_from_batch[i])
np.testing.assert_array_equal(res_from_map[i], res_from_batch[i])
if __name__ == '__main__':

View File

@ -39,7 +39,7 @@ def test_case_0():
for item in data1.create_dict_iterator(): # each data is a dictionary
# In this test, the dataset is 2x2 sequential tensors
golden = np.array([[i * 2, (i + 1) * 2], [(i + 2) * 2, (i + 3) * 2]])
assert np.array_equal(item["out"], golden)
np.testing.assert_array_equal(item["out"], golden)
i = i + 4
@ -60,9 +60,9 @@ def test_case_1():
for item in data1.create_dict_iterator(): # each data is a dictionary
# In this test, the dataset is 2x2 sequential tensors
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item["out0"], golden)
np.testing.assert_array_equal(item["out0"], golden)
golden = np.array([[i * 2, (i + 1) * 2], [(i + 2) * 2, (i + 3) * 2]])
assert np.array_equal(item["out1"], golden)
np.testing.assert_array_equal(item["out1"], golden)
i = i + 4
@ -84,7 +84,7 @@ def test_case_2():
for item in data1.create_dict_iterator(): # each data is a dictionary
# In this test, the dataset is 2x2 sequential tensors
golden = np.array([[i * 2, (i + 1) * 2], [(i + 2) * 2, (i + 3) * 2]])
assert np.array_equal(item["out"], golden)
np.testing.assert_array_equal(item["out"], golden)
i = i + 4
@ -106,11 +106,11 @@ def test_case_3():
for item in data1.create_dict_iterator(): # each data is a dictionary
# In this test, the dataset is 2x2 sequential tensors
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item["out0"], golden)
np.testing.assert_array_equal(item["out0"], golden)
golden = np.array([[i * 2, (i + 1) * 2], [(i + 2) * 2, (i + 3) * 2]])
assert np.array_equal(item["out1"], golden)
np.testing.assert_array_equal(item["out1"], golden)
golden = np.array([[i * 2 + 1, (i + 1) * 2 + 1], [(i + 2) * 2 + 1, (i + 3) * 2 + 1]])
assert np.array_equal(item["out2"], golden)
np.testing.assert_array_equal(item["out2"], golden)
i = i + 4
@ -132,11 +132,11 @@ def test_case_4():
for item in data1.create_dict_iterator(): # each data is a dictionary
# In this test, the dataset is 2x2 sequential tensors
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item["out0"], golden)
np.testing.assert_array_equal(item["out0"], golden)
golden = np.array([[i * 2, (i + 1) * 2], [(i + 2) * 2, (i + 3) * 2]])
assert np.array_equal(item["out1"], golden)
np.testing.assert_array_equal(item["out1"], golden)
golden = np.array([[i * 2 + 1, (i + 1) * 2 + 1], [(i + 2) * 2 + 1, (i + 3) * 2 + 1]])
assert np.array_equal(item["out2"], golden)
np.testing.assert_array_equal(item["out2"], golden)
i = i + 4
@ -159,7 +159,7 @@ def test_case_5():
for item in data1.create_dict_iterator(): # each data is a dictionary
# In this test, the dataset is 2x2 sequential tensors
golden = np.array([[1, 1], [1, 1]])
assert np.array_equal(item["out"], golden)
np.testing.assert_array_equal(item["out"], golden)
def test_case_6():
@ -178,7 +178,7 @@ def test_case_6():
for item in data1.create_dict_iterator(): # each data is a dictionary
# In this test, the dataset is 2x2 sequential tensors
golden = np.array([[i * 4, (i + 1) * 4], [(i + 2) * 4, (i + 3) * 4]])
assert np.array_equal(item["out"], golden)
np.testing.assert_array_equal(item["out"], golden)
i = i + 4
@@ -198,7 +198,7 @@ def test_case_7():
for item in data1.create_dict_iterator(): # each data is a dictionary
# In this test, the dataset is 2x2 sequential tensors
golden = np.array([[i * 2, (i + 1) * 2], [(i + 2) * 2, (i + 3) * 2]])
assert np.array_equal(item["out"], golden)
np.testing.assert_array_equal(item["out"], golden)
i = i + 4
@@ -221,11 +221,11 @@ def test_case_8():
for item in data1.create_dict_iterator(): # each data is a dictionary
# In this test, the dataset is 2x2 sequential tensors
golden = np.array([[i, i + 1], [i + 2, i + 3]])
assert np.array_equal(item["out0"], golden)
np.testing.assert_array_equal(item["out0"], golden)
golden = np.array([[i * 2, (i + 1) * 2], [(i + 2) * 2, (i + 3) * 2]])
assert np.array_equal(item["out1"], golden)
np.testing.assert_array_equal(item["out1"], golden)
golden = np.array([[i * 2 + 1, (i + 1) * 2 + 1], [(i + 2) * 2 + 1, (i + 3) * 2 + 1]])
assert np.array_equal(item["out2"], golden)
np.testing.assert_array_equal(item["out2"], golden)
i = i + 4
@@ -246,7 +246,7 @@ def test_case_9():
for item in data1.create_dict_iterator(): # each data is a dictionary
# In this test, the dataset is 2x2 sequential tensors
golden = np.array([[i * 2 + 3, (i + 1) * 2 + 3], [(i + 2) * 2 + 3, (i + 3) * 2 + 3]])
assert np.array_equal(item["out"], golden)
np.testing.assert_array_equal(item["out"], golden)
i = i + 4

View File

@@ -167,7 +167,7 @@ def test_nested_repeat5():
data = data.repeat(3)
for _, d in enumerate(data):
assert np.array_equal(d[0], np.asarray([[0], [1], [2]]))
np.testing.assert_array_equal(d[0], np.asarray([[0], [1], [2]]))
assert sum([1 for _ in data]) == 6
@@ -180,7 +180,7 @@ def test_nested_repeat6():
data = data.repeat(3)
for _, d in enumerate(data):
assert np.array_equal(d[0], np.asarray([[0], [1], [2]]))
np.testing.assert_array_equal(d[0], np.asarray([[0], [1], [2]]))
assert sum([1 for _ in data]) == 6
@@ -193,7 +193,7 @@ def test_nested_repeat7():
data = data.batch(3)
for _, d in enumerate(data):
assert np.array_equal(d[0], np.asarray([[0], [1], [2]]))
np.testing.assert_array_equal(d[0], np.asarray([[0], [1], [2]]))
assert sum([1 for _ in data]) == 6
@@ -207,9 +207,9 @@ def test_nested_repeat8():
for i, d in enumerate(data):
if i % 2 == 0:
assert np.array_equal(d[0], np.asarray([[0], [1]]))
np.testing.assert_array_equal(d[0], np.asarray([[0], [1]]))
else:
assert np.array_equal(d[0], np.asarray([[2]]))
np.testing.assert_array_equal(d[0], np.asarray([[2]]))
assert sum([1 for _ in data]) == 6 * 2

View File

@@ -324,7 +324,7 @@ def test_case_03(add_and_remove_cv_file):
i = 0
for item in d2.create_dict_iterator(): # each data is a dictionary
golden = np.array([i])
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 1
@@ -354,7 +354,7 @@ def type_tester(t):
for item in d2.create_dict_iterator(): # each data is a dictionary
golden = np.array([[i], [i + 1], [i + 2], [i + 3]], dtype=t)
logger.info(item)
assert np.array_equal(item["data"], golden)
np.testing.assert_array_equal(item["data"], golden)
i = i + 4
if i == 64:
i = 0

View File

@@ -79,12 +79,12 @@ def test_imagefolder(remove_json_files=True):
# Iterate and compare the data in the original pipeline (data1) against the deserialized pipeline (data2)
for item1, item2, item3, item4 in zip(data1.create_dict_iterator(), data2.create_dict_iterator(),
data3.create_dict_iterator(), data4.create_dict_iterator()):
assert np.array_equal(item1['image'], item2['image'])
assert np.array_equal(item1['image'], item3['image'])
assert np.array_equal(item1['label'], item2['label'])
assert np.array_equal(item1['label'], item3['label'])
assert np.array_equal(item3['image'], item4['image'])
assert np.array_equal(item3['label'], item4['label'])
np.testing.assert_array_equal(item1['image'], item2['image'])
np.testing.assert_array_equal(item1['image'], item3['image'])
np.testing.assert_array_equal(item1['label'], item2['label'])
np.testing.assert_array_equal(item1['label'], item3['label'])
np.testing.assert_array_equal(item3['image'], item4['image'])
np.testing.assert_array_equal(item3['label'], item4['label'])
num_samples += 1
logger.info("Number of data in data1: {}".format(num_samples))
@@ -119,10 +119,10 @@ def test_mnist_dataset(remove_json_files=True):
num = 0
for data1, data2, data3 in zip(data1.create_dict_iterator(), data2.create_dict_iterator(),
data3.create_dict_iterator()):
assert np.array_equal(data1['image'], data2['image'])
assert np.array_equal(data1['image'], data3['image'])
assert np.array_equal(data1['label'], data2['label'])
assert np.array_equal(data1['label'], data3['label'])
np.testing.assert_array_equal(data1['image'], data2['image'])
np.testing.assert_array_equal(data1['image'], data3['image'])
np.testing.assert_array_equal(data1['label'], data2['label'])
np.testing.assert_array_equal(data1['label'], data3['label'])
num += 1
logger.info("mnist total num samples is {}".format(str(num)))
@@ -160,10 +160,10 @@ def test_zip_dataset(remove_json_files=True):
num_cols = len(d0)
offset = 0
for t1 in d0:
assert np.array_equal(t1, d3[offset])
assert np.array_equal(t1, d3[offset + num_cols])
assert np.array_equal(t1, d4[offset])
assert np.array_equal(t1, d4[offset + num_cols])
np.testing.assert_array_equal(t1, d3[offset])
np.testing.assert_array_equal(t1, d3[offset + num_cols])
np.testing.assert_array_equal(t1, d4[offset])
np.testing.assert_array_equal(t1, d4[offset + num_cols])
offset += 1
rows += 1
assert rows == 12
@@ -199,7 +199,7 @@ def test_random_crop():
for item1, item1_1, item2 in zip(data1.create_dict_iterator(), data1_1.create_dict_iterator(),
data2.create_dict_iterator()):
assert np.array_equal(item1['image'], item1_1['image'])
np.testing.assert_array_equal(item1['image'], item1_1['image'])
_ = item2["image"]
# Restore configuration num_parallel_workers

View File

@@ -36,22 +36,22 @@ def test_batch_corner_cases():
tst1, tst2, tst3, tst4 = [], [], [], []
# case 1 & 2, where batch_size is greater than the entire epoch, with drop equals to both val
test_repeat_batch(gen_num=2, repeats=4, batch_size=7, drop=False, res=tst1)
assert np.array_equal(np.array([[0], [1], [0], [1], [0], [1], [0]]), tst1[0]), "\nATTENTION BATCH FAILED\n"
assert np.array_equal(np.array([[1]]), tst1[1]), "\nATTENTION TEST BATCH FAILED\n"
np.testing.assert_array_equal(np.array([[0], [1], [0], [1], [0], [1], [0]]), tst1[0], "\nATTENTION BATCH FAILED\n")
np.testing.assert_array_equal(np.array([[1]]), tst1[1], "\nATTENTION TEST BATCH FAILED\n")
assert len(tst1) == 2, "\nATTENTION TEST BATCH FAILED\n"
test_repeat_batch(gen_num=2, repeats=4, batch_size=5, drop=True, res=tst2)
assert np.array_equal(np.array([[0], [1], [0], [1], [0]]), tst2[0]), "\nATTENTION BATCH FAILED\n"
np.testing.assert_array_equal(np.array([[0], [1], [0], [1], [0]]), tst2[0], "\nATTENTION BATCH FAILED\n")
assert len(tst2) == 1, "\nATTENTION TEST BATCH FAILED\n"
# case 3 & 4, batch before repeat with different drop
test_batch_repeat(gen_num=5, repeats=2, batch_size=4, drop=True, res=tst3)
assert np.array_equal(np.array([[0], [1], [2], [3]]), tst3[0]), "\nATTENTION BATCH FAILED\n"
assert np.array_equal(tst3[0], tst3[1]), "\nATTENTION BATCH FAILED\n"
np.testing.assert_array_equal(np.array([[0], [1], [2], [3]]), tst3[0], "\nATTENTION BATCH FAILED\n")
np.testing.assert_array_equal(tst3[0], tst3[1], "\nATTENTION BATCH FAILED\n")
assert len(tst3) == 2, "\nATTENTION BATCH FAILED\n"
test_batch_repeat(gen_num=5, repeats=2, batch_size=4, drop=False, res=tst4)
assert np.array_equal(np.array([[0], [1], [2], [3]]), tst4[0]), "\nATTENTION BATCH FAILED\n"
assert np.array_equal(tst4[0], tst4[2]), "\nATTENTION BATCH FAILED\n"
assert np.array_equal(tst4[1], np.array([[4]])), "\nATTENTION BATCH FAILED\n"
assert np.array_equal(tst4[1], tst4[3]), "\nATTENTION BATCH FAILED\n"
np.testing.assert_array_equal(np.array([[0], [1], [2], [3]]), tst4[0], "\nATTENTION BATCH FAILED\n")
np.testing.assert_array_equal(tst4[0], tst4[2], "\nATTENTION BATCH FAILED\n")
np.testing.assert_array_equal(tst4[1], np.array([[4]]), "\nATTENTION BATCH FAILED\n")
np.testing.assert_array_equal(tst4[1], tst4[3], "\nATTENTION BATCH FAILED\n")
assert len(tst4) == 4, "\nATTENTION BATCH FAILED\n"

View File

@@ -56,7 +56,7 @@ def _compare_to_golden(golden_ref_dir, result_dict):
"""
test_array = np.array(list(result_dict.values()))
golden_array = np.load(golden_ref_dir, allow_pickle=True)['arr_0']
assert np.array_equal(test_array, golden_array)
np.testing.assert_array_equal(test_array, golden_array)
def _compare_to_golden_dict(golden_ref_dir, result_dict):