!27741 Add test util function to write object into json

Merge pull request !27741 from sabrinasun_59ee/test

commit 57bbfe6453
@@ -23,6 +23,57 @@ import csv
 import numpy as np
+
+
+def write_watchpoint_to_json(watchpoint_hits):
+    parameter_json = []
+    for p, _ in enumerate(watchpoint_hits.parameters):
+        parameter = "parameter" + str(p)
+        parameter_json.append({
+            parameter: {
+                'name': watchpoint_hits.parameters[p].name,
+                'disabled': watchpoint_hits.parameters[p].disabled,
+                'value': watchpoint_hits.parameters[p].value,
+                'hit': watchpoint_hits.parameters[p].hit,
+                'actual_value': watchpoint_hits.parameters[p].actual_value
+            }
+        })
+    wp = {
+        'name': watchpoint_hits.name,
+        'slot': watchpoint_hits.slot,
+        'condition': watchpoint_hits.condition,
+        'watchpoint_id': watchpoint_hits.watchpoint_id,
+        'parameter': parameter_json,
+        'error_code': watchpoint_hits.error_code,
+        'rank_id': watchpoint_hits.rank_id,
+        'root_graph_id': watchpoint_hits.root_graph_id
+    }
+    return wp
+
+
+def write_tensor_to_json(tensor_info, tensor_data):
+    data = np.frombuffer(
+        tensor_data.data_ptr, np.uint8, tensor_data.data_size).tolist()
+    py_byte_size = len(tensor_data.data_ptr)
+    c_byte_size = tensor_data.data_size
+    if c_byte_size != py_byte_size:
+        print("The python byte size of " + str(py_byte_size) +
+              " does not match the C++ byte size of " + str(c_byte_size) + "\n")
+    tensor = {
+        'tensor_info': {
+            'node_name': tensor_info.node_name,
+            'slot': tensor_info.slot,
+            'iteration': tensor_info.iteration,
+            'rank_id': tensor_info.rank_id,
+            'root_graph_id': tensor_info.root_graph_id,
+            'is_output': tensor_info.is_output
+        },
+        'tensor_data': {
+            'data': data,
+            'size_in_bytes': tensor_data.data_size,
+            'debugger_dtype': tensor_data.dtype,
+            'shape': tensor_data.shape
+        }
+    }
+    return tensor
+
+
 def build_dump_structure(path, tensor_name_list, tensor_list, net_name, tensor_info_list):
     """Build dump file structure from tensor_list."""
     ranks_run_history = {}
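Reviewer note: the two helpers above are what the reworked tests now compare against. Below is a minimal round-trip sketch of write_tensor_to_json, assuming dump_test_utils is importable from the test directory; the SimpleNamespace stubs and the dtype value 8 are hypothetical stand-ins for the tensor_info/tensor_data objects that mindspore.offline_debug.dbg_services returns.

import numpy as np
from types import SimpleNamespace

from dump_test_utils import write_tensor_to_json

# Raw bytes standing in for the data_ptr buffer that dbg_services exposes.
raw = np.array([1, 2, 3], dtype=np.int32).tobytes()
tensor_info = SimpleNamespace(node_name="Conv2D-op1", slot=0, iteration=0,
                              rank_id=0, root_graph_id=0, is_output=True)
tensor_data = SimpleNamespace(data_ptr=raw, data_size=len(raw),
                              dtype=8, shape=[3])

entry = write_tensor_to_json(tensor_info, tensor_data)
# The buffer is flattened to a list of uint8 values, so the golden file
# stays human-readable and json-serializable.
assert entry['tensor_data']['size_in_bytes'] == 12
assert len(entry['tensor_data']['data']) == 12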
@@ -23,7 +23,7 @@ import mindspore.offline_debug.dbg_services as d
 import numpy as np
 import pytest
 from tests.security_utils import security_off_wrap
-from dump_test_utils import build_dump_structure
+from dump_test_utils import build_dump_structure, write_tensor_to_json

 GENERATE_GOLDEN = False
 tensor_json = []
@@ -109,51 +109,18 @@ def compare_expect_actual_result(tensor_info_list, tensor_data_list, test_index,
     with open(golden_file) as f:
         expected_list = json.load(f)
         for x, (tensor_info, tensor_data) in enumerate(zip(tensor_info_list, tensor_data_list)):
-            test_id = "tensor_"+ str(test_index+x+1)
-            info = expected_list[x+test_index][test_id]
-            assert tensor_info.node_name == info['tensor_info']['node_name']
-            assert tensor_info.slot == info['tensor_info']['slot']
-            assert tensor_info.iteration == info['tensor_info']['iteration']
-            assert tensor_info.rank_id == info['tensor_info']['rank_id']
-            assert tensor_info.root_graph_id == info['tensor_info']['root_graph_id']
-            assert tensor_info.is_output == info['tensor_info']['is_output']
-            actual_data = np.frombuffer(
-                tensor_data.data_ptr, np.uint8, tensor_data.data_size).tolist()
-            assert actual_data == info['tensor_data']['data']
-            assert tensor_data.data_size == info['tensor_data']['size_in_bytes']
-            assert tensor_data.dtype == info['tensor_data']['debugger_dtype']
-            assert tensor_data.shape == info['tensor_data']['shape']
+            tensor_id = "tensor_" + str(test_index + x + 1)
+            expect_tensor = expected_list[x + test_index][tensor_id]
+            actual_tensor = write_tensor_to_json(tensor_info, tensor_data)
+            assert expect_tensor == actual_tensor


 def print_read_tensors(tensor_info_list, tensor_data_list, test_index, is_print, test_name):
     """Print read tensors result if GENERATE_GOLDEN is True."""
     for x, (tensor_info, tensor_data) in enumerate(zip(tensor_info_list, tensor_data_list)):
-        tensor = "tensor_" + str(test_index+x+1)
-        data = np.frombuffer(
-            tensor_data.data_ptr, np.uint8, tensor_data.data_size).tolist()
-        py_byte_size = len(tensor_data.data_ptr)
-        c_byte_size = tensor_data.data_size
-        if c_byte_size != py_byte_size:
-            print("The python byte size of " + str(py_byte_size) +
-                  " does not match the C++ byte size of " + str(c_byte_size) + "\n")
-        tensor_json.append({
-            tensor: {
-                'tensor_info': {
-                    'node_name': tensor_info.node_name,
-                    'slot': tensor_info.slot,
-                    'iteration': tensor_info.iteration,
-                    'rank_id': tensor_info.rank_id,
-                    'root_graph_id': tensor_info.root_graph_id,
-                    'is_output': tensor_info.is_output
-                },
-                'tensor_data': {
-                    'data': data,
-                    'size_in_bytes': tensor_data.data_size,
-                    'debugger_dtype': tensor_data.dtype,
-                    'shape': tensor_data.shape
-                }
-            }
-        })
+        tensor_name = "tensor_" + str(test_index + x + 1)
+        tensor = write_tensor_to_json(tensor_info, tensor_data)
+        tensor_json.append({tensor_name: tensor})
     if is_print:
         with open(test_name + "_expected.json", "w") as dump_f:
             json.dump(tensor_json, dump_f, indent=4, separators=(',', ': '))
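For context, this is the flag-driven golden-file workflow the two functions above support. The call site below is illustrative, inferred from the GENERATE_GOLDEN flag and the signatures in this hunk rather than shown in the diff, and the test name string is hypothetical.

# Run once with GENERATE_GOLDEN = True to (re)write <test_name>_expected.json,
# commit the golden file, then leave the flag False so the test compares
# against it instead of regenerating it.
if GENERATE_GOLDEN:
    print_read_tensors(tensor_info_list, tensor_data_list, 0, True, "async_dump_read_tensors")
else:
    compare_expect_actual_result(tensor_info_list, tensor_data_list, 0, "async_dump_read_tensors")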
@@ -24,7 +24,7 @@ import numpy as np
 import pytest
 import mindspore.offline_debug.dbg_services as d
 from tests.security_utils import security_off_wrap
-from dump_test_utils import build_dump_structure
+from dump_test_utils import build_dump_structure, write_watchpoint_to_json

 GENERATE_GOLDEN = False
 watchpoint_hits_json = []
@@ -181,8 +181,8 @@ def run_overflow_watchpoint(is_overflow):
             byte_list.append(task_id + 1)
         else:
             byte_list.append(0)
-        newFileByteArray = bytearray(byte_list)
-        f.write(bytes(newFileByteArray))
+        new_byte_array = bytearray(byte_list)
+        f.write(bytes(new_byte_array))
     debugger_backend = d.DbgServices(dump_file_path=tmp_dir)
     debugger_backend.initialize(net_name="Add", is_sync_mode=False)
     debugger_backend.add_watchpoint(watchpoint_id=1, watch_condition=2,
@@ -224,51 +224,16 @@ def compare_expect_actual_result(watchpoint_hits_list, test_index, test_name):
         expected_list = json.load(f)
         for x, watchpoint_hits in enumerate(watchpoint_hits_list):
             test_id = "watchpoint_hit" + str(test_index + x + 1)
-            info = expected_list[x + test_index][test_id]
-            assert watchpoint_hits.name == info['name']
-            assert watchpoint_hits.slot == info['slot']
-            assert watchpoint_hits.condition == info['condition']
-            assert watchpoint_hits.watchpoint_id == info['watchpoint_id']
-            assert watchpoint_hits.error_code == info['error_code']
-            assert watchpoint_hits.rank_id == info['rank_id']
-            assert watchpoint_hits.root_graph_id == info['root_graph_id']
-            for p, _ in enumerate(watchpoint_hits.parameters):
-                parameter = "parameter" + str(p)
-                assert watchpoint_hits.parameters[p].name == info['parameter'][p][parameter]['name']
-                assert watchpoint_hits.parameters[p].disabled == info['parameter'][p][parameter]['disabled']
-                assert watchpoint_hits.parameters[p].value == info['parameter'][p][parameter]['value']
-                assert watchpoint_hits.parameters[p].hit == info['parameter'][p][parameter]['hit']
-                assert watchpoint_hits.parameters[p].actual_value == info['parameter'][p][parameter]['actual_value']
+            expect_wp = expected_list[x + test_index][test_id]
+            actual_wp = write_watchpoint_to_json(watchpoint_hits)
+            assert actual_wp == expect_wp


 def print_watchpoint_hits(watchpoint_hits_list, test_index, is_print, test_name):
     """Print watchpoint hits."""
     for x, watchpoint_hits in enumerate(watchpoint_hits_list):
-        parameter_json = []
-        for p, _ in enumerate(watchpoint_hits.parameters):
-            parameter = "parameter" + str(p)
-            parameter_json.append({
-                parameter: {
-                    'name': watchpoint_hits.parameters[p].name,
-                    'disabled': watchpoint_hits.parameters[p].disabled,
-                    'value': watchpoint_hits.parameters[p].value,
-                    'hit': watchpoint_hits.parameters[p].hit,
-                    'actual_value': watchpoint_hits.parameters[p].actual_value
-                }
-            })
         watchpoint_hit = "watchpoint_hit" + str(test_index + x + 1)
-        watchpoint_hits_json.append({
-            watchpoint_hit: {
-                'name': watchpoint_hits.name,
-                'slot': watchpoint_hits.slot,
-                'condition': watchpoint_hits.condition,
-                'watchpoint_id': watchpoint_hits.watchpoint_id,
-                'parameter': parameter_json,
-                'error_code': watchpoint_hits.error_code,
-                'rank_id': watchpoint_hits.rank_id,
-                'root_graph_id': watchpoint_hits.root_graph_id
-            }
-        })
+        wp = write_watchpoint_to_json(watchpoint_hits)
+        watchpoint_hits_json.append({watchpoint_hit: wp})
     if is_print:
         with open(test_name + "_expected.json", "w") as dump_f:
             json.dump(watchpoint_hits_json, dump_f, indent=4, separators=(',', ': '))
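A matching sketch for the watchpoint side: one golden entry built with write_watchpoint_to_json. The SimpleNamespace stubs are hypothetical stand-ins for the watchpoint-hit and parameter objects dbg_services returns.

import json
from types import SimpleNamespace

from dump_test_utils import write_watchpoint_to_json

param = SimpleNamespace(name="param", disabled=False, value=0.0,
                        hit=True, actual_value=1.5)
hit = SimpleNamespace(name="Add", slot=0, condition=6, watchpoint_id=1,
                      parameters=[param], error_code=0, rank_id=0,
                      root_graph_id=0)

# Key naming follows the tests: "watchpoint_hit" + str(test_index + x + 1).
golden = [{"watchpoint_hit1": write_watchpoint_to_json(hit)}]
with open("watchpoints_expected.json", "w") as dump_f:
    json.dump(golden, dump_f, indent=4, separators=(',', ': '))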
@@ -5,7 +5,7 @@
             "slot": 1,
             "condition": 6,
             "watchpoint_id": 1,
-            "paremeter": [
+            "parameter": [
                 {
                     "parameter0": {
                         "name": "param",
@@ -27,7 +27,7 @@
             "slot": 0,
             "condition": 6,
             "watchpoint_id": 2,
-            "paremeter": [
+            "parameter": [
                 {
                     "parameter0": {
                         "name": "param",
@@ -49,7 +49,7 @@
             "slot": 0,
             "condition": 18,
             "watchpoint_id": 3,
-            "paremeter": [
+            "parameter": [
                 {
                     "parameter0": {
                         "name": "abs_mean_update_ratio_gt",
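These golden-file fixes are what keep the whole-dict comparison honest: write_watchpoint_to_json emits the key 'parameter', so the misspelled 'paremeter' entries would make every assert actual_wp == expect_wp fail. An illustrative check, reusing the hypothetical hit stub from the sketch above:

wp = write_watchpoint_to_json(hit)
assert 'parameter' in wp and 'paremeter' not in wp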
@@ -22,6 +22,90 @@ import bisect
 import csv
 import numpy as np
+
+
+def write_watchpoint_to_json(watchpoint_hits):
+    parameter_json = []
+    for p, _ in enumerate(watchpoint_hits.parameters):
+        parameter = "parameter" + str(p)
+        parameter_json.append({
+            parameter: {
+                'name': watchpoint_hits.parameters[p].name,
+                'disabled': watchpoint_hits.parameters[p].disabled,
+                'value': watchpoint_hits.parameters[p].value,
+                'hit': watchpoint_hits.parameters[p].hit,
+                'actual_value': watchpoint_hits.parameters[p].actual_value
+            }
+        })
+    wp = {
+        'name': watchpoint_hits.name,
+        'slot': watchpoint_hits.slot,
+        'condition': watchpoint_hits.condition,
+        'watchpoint_id': watchpoint_hits.watchpoint_id,
+        'parameter': parameter_json,
+        'error_code': watchpoint_hits.error_code,
+        'rank_id': watchpoint_hits.rank_id,
+        'root_graph_id': watchpoint_hits.root_graph_id
+    }
+    return wp
+
+
+def write_tensor_to_json(tensor_info, tensor_data):
+    data = np.frombuffer(
+        tensor_data.data_ptr, np.uint8, tensor_data.data_size).tolist()
+    py_byte_size = len(tensor_data.data_ptr)
+    c_byte_size = tensor_data.data_size
+    if c_byte_size != py_byte_size:
+        print("The python byte size of " + str(py_byte_size) +
+              " does not match the C++ byte size of " + str(c_byte_size) + "\n")
+    tensor = {
+        'tensor_info': {
+            'node_name': tensor_info.node_name,
+            'slot': tensor_info.slot,
+            'iteration': tensor_info.iteration,
+            'rank_id': tensor_info.rank_id,
+            'root_graph_id': tensor_info.root_graph_id,
+            'is_output': tensor_info.is_output
+        },
+        'tensor_data': {
+            'data': data,
+            'size_in_bytes': tensor_data.data_size,
+            'debugger_dtype': tensor_data.dtype,
+            'shape': tensor_data.shape
+        }
+    }
+    return tensor
+
+
+def write_tensor_stat_to_json(tensor_info_item, tensor_base, tensor_stat):
+    tensor = {
+        'tensor_info': {
+            'node_name': tensor_info_item.node_name,
+            'slot': tensor_info_item.slot,
+            'iteration': tensor_info_item.iteration,
+            'rank_id': tensor_info_item.rank_id,
+            'root_graph_id': tensor_info_item.root_graph_id,
+            'is_output': tensor_info_item.is_output
+        },
+        'tensor_base_info': {
+            'size_in_bytes': tensor_base.data_size,
+            'debugger_dtype': tensor_base.dtype,
+            'shape': tensor_base.shape
+        },
+        'tensor_stat_info': {
+            'size_in_bytes': tensor_stat.data_size,
+            'debugger_dtype': tensor_stat.dtype,
+            'shape': tensor_stat.shape,
+            'is_bool': tensor_stat.is_bool,
+            'max_vaue': tensor_stat.max_value,
+            'min_value': tensor_stat.min_value,
+            'avg_value': tensor_stat.avg_value,
+            'count': tensor_stat.count,
+            'neg_zero_count': tensor_stat.neg_zero_count,
+            'pos_zero_count': tensor_stat.pos_zero_count,
+            'nan_count': tensor_stat.nan_count,
+            'neg_inf_count': tensor_stat.neg_inf_count,
+            'pos_inf_count': tensor_stat.pos_inf_count,
+            'zero_count': tensor_stat.zero_count
+        }
+    }
+    return tensor
+
+
 def build_dump_structure(tensor_name_list, tensor_list, net_name, tensor_info_list):
     """Build dump file structure from tensor_list."""
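Worth flagging for reviewers: the 'max_vaue' key in write_tensor_stat_to_json is spelled exactly as in the existing golden files and in the asserts this change deletes below (stat_json['max_vaue']), so the whole-dict comparison still passes; renaming the key would require regenerating the goldens. A sketch of the emitted shape, with SimpleNamespace stubs as hypothetical stand-ins for the dbg_services objects:

from types import SimpleNamespace

from dump_test_utils import write_tensor_stat_to_json

info = SimpleNamespace(node_name="Add-op1", slot=0, iteration=0,
                       rank_id=0, root_graph_id=0, is_output=True)
base = SimpleNamespace(data_size=12, dtype=8, shape=[3])
stat = SimpleNamespace(data_size=12, dtype=8, shape=[3], is_bool=False,
                       max_value=3.0, min_value=1.0, avg_value=2.0, count=3,
                       neg_zero_count=0, pos_zero_count=0, nan_count=0,
                       neg_inf_count=0, pos_inf_count=0, zero_count=0)

entry = write_tensor_stat_to_json(info, base, stat)
assert entry['tensor_stat_info']['max_vaue'] == 3.0  # note the key spelling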
@@ -22,7 +22,7 @@ import shutil
 import numpy as np

 import mindspore.offline_debug.dbg_services as d
-from dump_test_utils import build_dump_structure_with_constant
+from dump_test_utils import build_dump_structure_with_constant, write_tensor_to_json
 from tests.security_utils import security_off_wrap
@@ -84,49 +84,16 @@ class TestOfflineReadConstantTensor:
             expected_list = json.load(f)
             for x, (tensor_info, tensor_data) in enumerate(zip(tensor_info_list, tensor_data_list)):
                 tensor_id = "tensor_" + str(test_index + x + 1)
-                info = expected_list[x+test_index][tensor_id]
-                assert tensor_info.node_name == info['tensor_info']['node_name']
-                assert tensor_info.slot == info['tensor_info']['slot']
-                assert tensor_info.iteration == info['tensor_info']['iteration']
-                assert tensor_info.rank_id == info['tensor_info']['rank_id']
-                assert tensor_info.root_graph_id == info['tensor_info']['root_graph_id']
-                assert tensor_info.is_output == info['tensor_info']['is_output']
-                actual_data = np.frombuffer(
-                    tensor_data.data_ptr, np.uint8, tensor_data.data_size).tolist()
-                assert actual_data == info['tensor_data']['data']
-                assert tensor_data.data_size == info['tensor_data']['size_in_bytes']
-                assert tensor_data.dtype == info['tensor_data']['debugger_dtype']
-                assert tensor_data.shape == info['tensor_data']['shape']
+                expect_tensor = expected_list[x + test_index][tensor_id]
+                actual_tensor = write_tensor_to_json(tensor_info, tensor_data)
+                assert expect_tensor == actual_tensor

     def print_read_tensors(self, tensor_info_list, tensor_data_list, test_index, is_print):
         """Print read tensors result if GENERATE_GOLDEN is True."""
         for x, (tensor_info, tensor_data) in enumerate(zip(tensor_info_list, tensor_data_list)):
-            tensor = "tensor_" + str(test_index + x + 1)
-            data = np.frombuffer(
-                tensor_data.data_ptr, np.uint8, tensor_data.data_size).tolist()
-            py_byte_size = len(tensor_data.data_ptr)
-            c_byte_size = tensor_data.data_size
-            if c_byte_size != py_byte_size:
-                print("The python byte size of " + str(py_byte_size) +
-                      " does not match the C++ byte size of " + str(c_byte_size) + "\n")
-            self.tensor_json.append({
-                tensor: {
-                    'tensor_info': {
-                        'node_name': tensor_info.node_name,
-                        'slot': tensor_info.slot,
-                        'iteration': tensor_info.iteration,
-                        'rank_id': tensor_info.rank_id,
-                        'root_graph_id': tensor_info.root_graph_id,
-                        'is_output': tensor_info.is_output
-                    },
-                    'tensor_data': {
-                        'data': data,
-                        'size_in_bytes': tensor_data.data_size,
-                        'debugger_dtype': tensor_data.dtype,
-                        'shape': tensor_data.shape
-                    }
-                }
-            })
+            tensor_name = "tensor_" + str(test_index + x + 1)
+            tensor = write_tensor_to_json(tensor_info, tensor_data)
+            self.tensor_json.append({tensor_name: tensor})
         if is_print:
             with open(self.test_name + "_expected.json", "w") as dump_f:
                 json.dump(self.tensor_json, dump_f, indent=4, separators=(',', ': '))
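The test_index + x + 1 arithmetic that recurs in these tests is what lets several scenarios append into one shared golden list; a small self-contained illustration of the resulting keys:

# Scenario 1 runs with test_index=0 and three tensors, scenario 2 continues
# at test_index=3: together they yield tensor_1 .. tensor_6 in one list.
keys = ["tensor_" + str(test_index + x + 1)
        for test_index in (0, 3) for x in range(3)]
assert keys == ['tensor_1', 'tensor_2', 'tensor_3',
                'tensor_4', 'tensor_5', 'tensor_6']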
@@ -21,7 +21,7 @@ import shutil
 import json
 import numpy as np
 import mindspore.offline_debug.dbg_services as d
-from dump_test_utils import build_dump_structure
+from dump_test_utils import build_dump_structure, write_tensor_to_json
 from tests.security_utils import security_off_wrap
@@ -131,50 +131,17 @@ class TestOfflineReadTensor:
         with open(golden_file) as f:
             expected_list = json.load(f)
             for x, (tensor_info, tensor_data) in enumerate(zip(tensor_info_list, tensor_data_list)):
-                test_id = "tensor_"+ str(test_index+x+1)
-                info = expected_list[x+test_index][test_id]
-                assert tensor_info.node_name == info['tensor_info']['node_name']
-                assert tensor_info.slot == info['tensor_info']['slot']
-                assert tensor_info.iteration == info['tensor_info']['iteration']
-                assert tensor_info.rank_id == info['tensor_info']['rank_id']
-                assert tensor_info.root_graph_id == info['tensor_info']['root_graph_id']
-                assert tensor_info.is_output == info['tensor_info']['is_output']
-                actual_data = np.frombuffer(
-                    tensor_data.data_ptr, np.uint8, tensor_data.data_size).tolist()
-                assert actual_data == info['tensor_data']['data']
-                assert tensor_data.data_size == info['tensor_data']['size_in_bytes']
-                assert tensor_data.dtype == info['tensor_data']['debugger_dtype']
-                assert tensor_data.shape == info['tensor_data']['shape']
+                tensor_id = "tensor_" + str(test_index + x + 1)
+                expect_tensor = expected_list[x + test_index][tensor_id]
+                actual_tensor = write_tensor_to_json(tensor_info, tensor_data)
+                assert expect_tensor == actual_tensor

     def print_read_tensors(self, tensor_info_list, tensor_data_list, test_index, is_print):
         """Print read tensors result if GENERATE_GOLDEN is True."""
         for x, (tensor_info, tensor_data) in enumerate(zip(tensor_info_list, tensor_data_list)):
-            tensor = "tensor_" + str(test_index+x+1)
-            data = np.frombuffer(
-                tensor_data.data_ptr, np.uint8, tensor_data.data_size).tolist()
-            py_byte_size = len(tensor_data.data_ptr)
-            c_byte_size = tensor_data.data_size
-            if c_byte_size != py_byte_size:
-                print("The python byte size of " + str(py_byte_size) +
-                      " does not match the C++ byte size of " + str(c_byte_size) + "\n")
-            self.tensor_json.append({
-                tensor: {
-                    'tensor_info': {
-                        'node_name': tensor_info.node_name,
-                        'slot': tensor_info.slot,
-                        'iteration': tensor_info.iteration,
-                        'rank_id': tensor_info.rank_id,
-                        'root_graph_id': tensor_info.root_graph_id,
-                        'is_output': tensor_info.is_output
-                    },
-                    'tensor_data': {
-                        'data': data,
-                        'size_in_bytes': tensor_data.data_size,
-                        'debugger_dtype': tensor_data.dtype,
-                        'shape': tensor_data.shape
-                    }
-                }
-            })
+            tensor_name = "tensor_" + str(test_index + x + 1)
+            tensor = write_tensor_to_json(tensor_info, tensor_data)
+            self.tensor_json.append({tensor_name: tensor})
         if is_print:
             with open(self.test_name + "_expected.json", "w") as dump_f:
                 json.dump(self.tensor_json, dump_f, indent=4, separators=(',', ': '))
@@ -21,7 +21,7 @@ import json
 import shutil
 import numpy as np
 import mindspore.offline_debug.dbg_services as d
-from dump_test_utils import build_dump_structure
+from dump_test_utils import build_dump_structure, write_tensor_to_json
 from tests.security_utils import security_off_wrap
@@ -115,50 +115,17 @@ class TestOfflineReadNonExistTensor:
         with open(golden_file) as f:
             expected_list = json.load(f)
             for x, (tensor_info, tensor_data) in enumerate(zip(tensor_info_list, tensor_data_list)):
-                tensor_id = "tensor_"+ str(test_index+x+1)
-                info = expected_list[x+test_index][tensor_id]
-                assert tensor_info.node_name == info['tensor_info']['node_name']
-                assert tensor_info.slot == info['tensor_info']['slot']
-                assert tensor_info.iteration == info['tensor_info']['iteration']
-                assert tensor_info.rank_id == info['tensor_info']['rank_id']
-                assert tensor_info.root_graph_id == info['tensor_info']['root_graph_id']
-                assert tensor_info.is_output == info['tensor_info']['is_output']
-                actual_data = np.frombuffer(
-                    tensor_data.data_ptr, np.uint8, tensor_data.data_size).tolist()
-                assert actual_data == info['tensor_data']['data']
-                assert tensor_data.data_size == info['tensor_data']['size_in_bytes']
-                assert tensor_data.dtype == info['tensor_data']['debugger_dtype']
-                assert tensor_data.shape == info['tensor_data']['shape']
+                tensor_id = "tensor_" + str(test_index + x + 1)
+                expect_tensor = expected_list[x + test_index][tensor_id]
+                actual_tensor = write_tensor_to_json(tensor_info, tensor_data)
+                assert expect_tensor == actual_tensor

     def print_read_tensors(self, tensor_info_list, tensor_data_list, test_index, is_print):
         """Print read tensors result if GENERATE_GOLDEN is True."""
         for x, (tensor_info, tensor_data) in enumerate(zip(tensor_info_list, tensor_data_list)):
-            tensor = "tensor_" + str(test_index+x+1)
-            data = np.frombuffer(
-                tensor_data.data_ptr, np.uint8, tensor_data.data_size).tolist()
-            py_byte_size = len(tensor_data.data_ptr)
-            c_byte_size = tensor_data.data_size
-            if c_byte_size != py_byte_size:
-                print("The python byte size of " + str(py_byte_size) +
-                      " does not match the C++ byte size of " + str(c_byte_size) + "\n")
-            self.tensor_json.append({
-                tensor: {
-                    'tensor_info': {
-                        'node_name': tensor_info.node_name,
-                        'slot': tensor_info.slot,
-                        'iteration': tensor_info.iteration,
-                        'rank_id': tensor_info.rank_id,
-                        'root_graph_id': tensor_info.root_graph_id,
-                        'is_output': tensor_info.is_output
-                    },
-                    'tensor_data': {
-                        'data': data,
-                        'size_in_bytes': tensor_data.data_size,
-                        'debugger_dtype': tensor_data.dtype,
-                        'shape': tensor_data.shape
-                    }
-                }
-            })
+            tensor_name = "tensor_" + str(test_index + x + 1)
+            tensor = write_tensor_to_json(tensor_info, tensor_data)
+            self.tensor_json.append({tensor_name: tensor})
         if is_print:
             with open(self.test_name + "_expected.json", "w") as dump_f:
                 json.dump(self.tensor_json, dump_f, indent=4, separators=(',', ': '))
@@ -21,7 +21,7 @@ import shutil
 import json
 import numpy as np
 import mindspore.offline_debug.dbg_services as d
-from dump_test_utils import build_dump_structure
+from dump_test_utils import build_dump_structure, write_tensor_stat_to_json
 from tests.security_utils import security_off_wrap
@@ -121,72 +121,19 @@ class TestOfflineReadTensorBaseStat:
             for x, (tensor_info_item, tensor_base, tensor_stat) in enumerate(zip(tensor_info,
                                                                                  tensor_base_data_list,
                                                                                  tensor_stat_data_list)):
-                test_id = "test"+ str(test_index+x+1)
-                info_json = expected_list[x+test_index][test_id]['tensor_info']
-                base_json = expected_list[x+test_index][test_id]['tensor_base_info']
-                stat_json = expected_list[x+test_index][test_id]['tensor_stat_info']
-                assert tensor_info_item.node_name == info_json['node_name']
-                assert tensor_info_item.slot == info_json['slot']
-                assert tensor_info_item.iteration == info_json['iteration']
-                assert tensor_info_item.rank_id == info_json['rank_id']
-                assert tensor_info_item.root_graph_id == info_json['root_graph_id']
-                assert tensor_info_item.is_output == info_json['is_output']
-                assert tensor_base.data_size == base_json['size_in_bytes']
-                assert tensor_base.dtype == base_json['debugger_dtype']
-                assert tensor_base.shape == base_json['shape']
-                assert tensor_stat.data_size == stat_json['size_in_bytes']
-                assert tensor_stat.dtype == stat_json['debugger_dtype']
-                assert tensor_stat.shape == stat_json['shape']
-                assert tensor_stat.is_bool == stat_json['is_bool']
-                assert tensor_stat.max_value == stat_json['max_vaue']
-                assert tensor_stat.min_value == stat_json['min_value']
-                assert tensor_stat.avg_value == stat_json['avg_value']
-                assert tensor_stat.count == stat_json['count']
-                assert tensor_stat.neg_zero_count == stat_json['neg_zero_count']
-                assert tensor_stat.pos_zero_count == stat_json['pos_zero_count']
-                assert tensor_stat.nan_count == stat_json['nan_count']
-                assert tensor_stat.neg_inf_count == stat_json['neg_inf_count']
-                assert tensor_stat.pos_inf_count == stat_json['pos_inf_count']
-                assert tensor_stat.zero_count == stat_json['zero_count']
+                test_id = "test"+ str(test_index + x + 1)
+                expect_tensor = expected_list[x + test_index][test_id]
+                actual_tensor = write_tensor_stat_to_json(tensor_info_item, tensor_base, tensor_stat)
+                assert actual_tensor == expect_tensor

     def print_read_tensors(self, tensor_info, tensor_base_data_list, tensor_stat_data_list, test_index, is_print):
         """Print read tensors info."""
         for x, (tensor_info_item, tensor_base, tensor_stat) in enumerate(zip(tensor_info,
                                                                              tensor_base_data_list,
                                                                              tensor_stat_data_list)):
-            test_name = "test" + str(test_index+x+1)
-            self.tensor_json.append({
-                test_name: {
-                    'tensor_info': {
-                        'node_name': tensor_info_item.node_name,
-                        'slot': tensor_info_item.slot,
-                        'iteration': tensor_info_item.iteration,
-                        'rank_id': tensor_info_item.rank_id,
-                        'root_graph_id': tensor_info_item.root_graph_id,
-                        'is_output': tensor_info_item.is_output
-                    },
-                    'tensor_base_info': {
-                        'size_in_bytes': tensor_base.data_size,
-                        'debugger_dtype': tensor_base.dtype,
-                        'shape': tensor_base.shape
-                    },
-                    'tensor_stat_info': {
-                        'size_in_bytes': tensor_stat.data_size,
-                        'debugger_dtype': tensor_stat.dtype,
-                        'shape': tensor_stat.shape,
-                        'is_bool': tensor_stat.is_bool,
-                        'max_vaue': tensor_stat.max_value,
-                        'min_value': tensor_stat.min_value,
-                        'avg_value': tensor_stat.avg_value,
-                        'count': tensor_stat.count,
-                        'neg_zero_count': tensor_stat.neg_zero_count,
-                        'pos_zero_count': tensor_stat.pos_zero_count,
-                        'nan_count': tensor_stat.nan_count,
-                        'neg_inf_count': tensor_stat.neg_inf_count,
-                        'pos_inf_count': tensor_stat.pos_inf_count,
-                        'zero_count': tensor_stat.zero_count
-                    }
-            }})
+            test_name = "test" + str(test_index + x + 1)
+            tensor = write_tensor_stat_to_json(tensor_info_item, tensor_base, tensor_stat)
+            self.tensor_json.append({test_name: tensor})
         if is_print:
             with open(self.test_name + "_expected.json", "w") as dump_f:
                 json.dump(self.tensor_json, dump_f, indent=4, separators=(',', ': '))
@@ -21,7 +21,7 @@ import json
 import shutil
 import numpy as np
 import mindspore.offline_debug.dbg_services as d
-from dump_test_utils import build_dump_structure
+from dump_test_utils import build_dump_structure, write_watchpoint_to_json
 from tests.security_utils import security_off_wrap
@@ -188,51 +188,17 @@ class TestOfflineWatchpoints:
         with open(golden_file) as f:
             expected_list = json.load(f)
             for x, watchpoint_hits in enumerate(watchpoint_hits_list):
-                test_id = "watchpoint_hit" + str(test_index+x+1)
-                info = expected_list[x+test_index][test_id]
-                assert watchpoint_hits.name == info['name']
-                assert watchpoint_hits.slot == info['slot']
-                assert watchpoint_hits.condition == info['condition']
-                assert watchpoint_hits.watchpoint_id == info['watchpoint_id']
-                assert watchpoint_hits.error_code == info['error_code']
-                assert watchpoint_hits.rank_id == info['rank_id']
-                assert watchpoint_hits.root_graph_id == info['root_graph_id']
-                for p, _ in enumerate(watchpoint_hits.parameters):
-                    parameter = "parameter" + str(p)
-                    assert watchpoint_hits.parameters[p].name == info['paremeter'][p][parameter]['name']
-                    assert watchpoint_hits.parameters[p].disabled == info['paremeter'][p][parameter]['disabled']
-                    assert watchpoint_hits.parameters[p].value == info['paremeter'][p][parameter]['value']
-                    assert watchpoint_hits.parameters[p].hit == info['paremeter'][p][parameter]['hit']
-                    assert watchpoint_hits.parameters[p].actual_value == info['paremeter'][p][parameter]['actual_value']
+                test_id = "watchpoint_hit" + str(test_index + x + 1)
+                expect_wp = expected_list[x + test_index][test_id]
+                actual_wp = write_watchpoint_to_json(watchpoint_hits)
+                assert actual_wp == expect_wp

     def print_watchpoint_hits(self, watchpoint_hits_list, test_index, is_print):
         """Print watchpoint hits."""
         for x, watchpoint_hits in enumerate(watchpoint_hits_list):
-            parameter_json = []
-            for p, _ in enumerate(watchpoint_hits.parameters):
-                parameter = "parameter" + str(p)
-                parameter_json.append({
-                    parameter: {
-                        'name': watchpoint_hits.parameters[p].name,
-                        'disabled': watchpoint_hits.parameters[p].disabled,
-                        'value': watchpoint_hits.parameters[p].value,
-                        'hit': watchpoint_hits.parameters[p].hit,
-                        'actual_value': watchpoint_hits.parameters[p].actual_value
-                    }
-                })
-            watchpoint_hit = "watchpoint_hit" + str(test_index+x+1)
-            self.watchpoint_hits_json.append({
-                watchpoint_hit: {
-                    'name': watchpoint_hits.name,
-                    'slot': watchpoint_hits.slot,
-                    'condition': watchpoint_hits.condition,
-                    'watchpoint_id': watchpoint_hits.watchpoint_id,
-                    'paremeter': parameter_json,
-                    'error_code': watchpoint_hits.error_code,
-                    'rank_id': watchpoint_hits.rank_id,
-                    'root_graph_id': watchpoint_hits.root_graph_id
-                }
-            })
+            watchpoint_hit = "watchpoint_hit" + str(test_index + x + 1)
+            wp = write_watchpoint_to_json(watchpoint_hits)
+            self.watchpoint_hits_json.append({watchpoint_hit: wp})
         if is_print:
             with open(self.test_name + "_expected.json", "w") as dump_f:
                 json.dump(self.watchpoint_hits_json, dump_f, indent=4, separators=(',', ': '))