forked from mindspore-Ecosystem/mindspore
Add ascend ST testcases for offline debug
Find whl installing path
This commit is contained in:
parent
489a92aaa2
commit
48616eb340
|
@ -22,6 +22,7 @@
|
|||
#include <map>
|
||||
#include <numeric>
|
||||
#include <unordered_set>
|
||||
#include "pybind11/embed.h"
|
||||
#ifdef ONLINE_DBG_MODE
|
||||
#include "backend/session/anf_runtime_algorithm.h"
|
||||
#endif
|
||||
|
@ -352,10 +353,6 @@ void DebugServices::ConvertToHostFormat(const std::map<std::string, std::vector<
|
|||
files_to_convert_in_dir.push_back(dump_key + "/" + file_name);
|
||||
}
|
||||
}
|
||||
std::string current_working_dir(__FILE__);
|
||||
std::size_t pos = current_working_dir.find_last_of("\\/");
|
||||
current_working_dir = (std::string::npos == pos) ? "" : current_working_dir.substr(0, pos);
|
||||
MS_LOG(INFO) << current_working_dir;
|
||||
std::ostringstream input_file_o;
|
||||
const char *const delim = " ";
|
||||
std::copy(files_to_convert_in_dir.begin(), files_to_convert_in_dir.end(),
|
||||
|
@ -363,9 +360,19 @@ void DebugServices::ConvertToHostFormat(const std::map<std::string, std::vector<
|
|||
std::string input_files = input_file_o.str();
|
||||
MS_LOG(INFO) << "Ops to convert: " << input_files;
|
||||
if (input_files != "") {
|
||||
std::string convert_command = "python " + current_working_dir + "/convert_async.py -out " + dump_key + " -t " +
|
||||
file_format + " -d " + dump_key + " -f NCHW -l " + input_files;
|
||||
(void)(system(convert_command.c_str()) + 1);
|
||||
// Look for the installation path to the conver_async package. If not found, throw exception and terminate the
|
||||
// later task.
|
||||
try {
|
||||
auto pkg = pybind11::module::import("mindspore.offline_debug.convert_async");
|
||||
std::string convert_pkg_path = pkg.attr("__file__").cast<std::string>();
|
||||
MS_LOG(INFO) << "The file for converting async dump data is in " << convert_pkg_path;
|
||||
std::string convert_command = "python " + convert_pkg_path + " -out " + dump_key + " -t " + file_format +
|
||||
" -d " + dump_key + " -f NCHW -l " + input_files;
|
||||
(void)(system(convert_command.c_str()) + 1);
|
||||
} catch (pybind11::error_already_set &e) {
|
||||
MS_LOG(EXCEPTION) << "Can't find package mindspore.offline_debug.convert_async";
|
||||
}
|
||||
|
||||
DIR *d_handle;
|
||||
d_handle = opendir(dump_key.c_str());
|
||||
if (d_handle != nullptr) {
|
||||
|
|
|
@ -63,41 +63,41 @@ def handle_multi_process(convert_obj, files):
|
|||
# pylint: enable=W0212
|
||||
return return_code
|
||||
|
||||
if __name__ == "__main__":
|
||||
convert_parser = argparse.ArgumentParser()
|
||||
convert_parser.add_argument(
|
||||
'-d', '--dump_file', dest='dump_path', default='', required=True)
|
||||
convert_parser.add_argument(
|
||||
'-l', '--file_list', nargs="*", dest='file_list', default='')
|
||||
convert_parser.add_argument('-f', '--format', dest='format', default=None)
|
||||
convert_parser.add_argument(
|
||||
'-v', '--version', dest='dump_version', choices=[1, 2], type=int, default=2)
|
||||
convert_parser.add_argument('-s', '--shape', dest='shape', default=None)
|
||||
convert_parser.add_argument('-o', '--output_tensor',
|
||||
dest='output', default=None)
|
||||
convert_parser.add_argument('-i', '--input_tensor', dest='input', default=None)
|
||||
convert_parser.add_argument(
|
||||
'-c', '--custom_script_path', dest='custom_script_path', default=None)
|
||||
convert_parser.add_argument('-out', '--output', dest='output_path', default='')
|
||||
convert_parser.add_argument(
|
||||
'-t', '--type', dest='output_file_type', choices=['npy', 'bin'], default='npy')
|
||||
|
||||
convert_parser = argparse.ArgumentParser()
|
||||
convert_parser.add_argument(
|
||||
'-d', '--dump_file', dest='dump_path', default='', required=True)
|
||||
convert_parser.add_argument(
|
||||
'-l', '--file_list', nargs="*", dest='file_list', default='')
|
||||
convert_parser.add_argument('-f', '--format', dest='format', default=None)
|
||||
convert_parser.add_argument(
|
||||
'-v', '--version', dest='dump_version', choices=[1, 2], type=int, default=2)
|
||||
convert_parser.add_argument('-s', '--shape', dest='shape', default=None)
|
||||
convert_parser.add_argument('-o', '--output_tensor',
|
||||
dest='output', default=None)
|
||||
convert_parser.add_argument('-i', '--input_tensor', dest='input', default=None)
|
||||
convert_parser.add_argument(
|
||||
'-c', '--custom_script_path', dest='custom_script_path', default=None)
|
||||
convert_parser.add_argument('-out', '--output', dest='output_path', default='')
|
||||
convert_parser.add_argument(
|
||||
'-t', '--type', dest='output_file_type', choices=['npy', 'bin'], default='npy')
|
||||
|
||||
args = convert_parser.parse_args()
|
||||
dump_failed = os.path.abspath(args.dump_path) + "/convert_failed_file_list.txt"
|
||||
if os.path.exists(dump_failed):
|
||||
os.remove(dump_failed)
|
||||
file_list = args.file_list
|
||||
if args.format is not None:
|
||||
convert = FormatConversionMain(args)
|
||||
else:
|
||||
convert = DumpDataParser(args)
|
||||
if args.file_list == "":
|
||||
file_list = os.listdir(args.dump_path)
|
||||
handle_multi_process(convert, file_list)
|
||||
if os.path.exists(dump_failed):
|
||||
with open(dump_failed, newline='') as failed_ops:
|
||||
file_reader = csv.reader(failed_ops, delimiter=',')
|
||||
file_list = [os.path.abspath(row[0]) for row in file_reader]
|
||||
args.format = None
|
||||
convert = DumpDataParser(args)
|
||||
args = convert_parser.parse_args()
|
||||
dump_failed = os.path.abspath(args.dump_path) + "/convert_failed_file_list.txt"
|
||||
if os.path.exists(dump_failed):
|
||||
os.remove(dump_failed)
|
||||
file_list = args.file_list
|
||||
if args.format is not None:
|
||||
convert = FormatConversionMain(args)
|
||||
else:
|
||||
convert = DumpDataParser(args)
|
||||
if args.file_list == "":
|
||||
file_list = os.listdir(args.dump_path)
|
||||
handle_multi_process(convert, file_list)
|
||||
if os.path.exists(dump_failed):
|
||||
with open(dump_failed, newline='') as failed_ops:
|
||||
file_reader = csv.reader(failed_ops, delimiter=',')
|
||||
file_list = [os.path.abspath(row[0]) for row in file_reader]
|
||||
args.format = None
|
||||
convert = DumpDataParser(args)
|
||||
handle_multi_process(convert, file_list)
|
|
@ -0,0 +1,30 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ==============================================================================
|
||||
"""
|
||||
Utils for testing offline debugger.
|
||||
"""
|
||||
|
||||
import filecmp
|
||||
import os
|
||||
|
||||
|
||||
def compare_actual_with_expected(test_name):
|
||||
"""Compare actual file with expected."""
|
||||
pwd = os.getcwd()
|
||||
is_eq = filecmp.cmp(pwd + "/golden/" +
|
||||
test_name + ".expected", test_name + ".actual", shallow=False)
|
||||
if os.path.exists(test_name + ".actual"):
|
||||
os.remove(test_name + ".actual")
|
||||
return is_eq
|
|
@ -0,0 +1,28 @@
|
|||
-----------------------------------------------------------
|
||||
tensor_info_1 attributes:
|
||||
node name = Default/network-TrainOneStepCell/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op169
|
||||
slot = 0
|
||||
iteration = 2
|
||||
device_id = None
|
||||
root_graph_id = 1
|
||||
is_parameter = False
|
||||
|
||||
tensor_data_1 attributes:
|
||||
data (printed in uint8) = [149 167 122 ... 160 212 164]
|
||||
size in bytes = 2076672
|
||||
debugger dtype = 10
|
||||
shape = [32, 12, 13, 13, 16]
|
||||
-----------------------------------------------------------
|
||||
tensor_info_2 attributes:
|
||||
node name = Default/network-TrainOneStepCell/network-WithLossCell/_backbone-AlexNet/ReLUV2-op348
|
||||
slot = 1
|
||||
iteration = 2
|
||||
device_id = None
|
||||
root_graph_id = 1
|
||||
is_parameter = False
|
||||
|
||||
tensor_data_2 attributes:
|
||||
data (printed in uint8) = [ 20 21 18 ... 126 98 25]
|
||||
size in bytes = 129792
|
||||
debugger dtype = 6
|
||||
shape = [32, 12, 13, 13, 2]
|
|
@ -0,0 +1,14 @@
|
|||
-----------------------------------------------------------
|
||||
watchpoint_hit for test_1 attributes:
|
||||
name = Default/network-TrainOneStepCell/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op169
|
||||
slot = 0
|
||||
condition = 6
|
||||
watchpoint_id = 1
|
||||
parameter 0 name = param
|
||||
parameter 0 disabled = False
|
||||
parameter 0 value = 0.0
|
||||
parameter 0 hit = True
|
||||
parameter 0 actual_value = -0.1417236328125
|
||||
error code = 0
|
||||
device_id = 0
|
||||
root_graph_id = 1
|
|
@ -0,0 +1,73 @@
|
|||
-----------------------------------------------------------
|
||||
tensor_info_1 attributes:
|
||||
node name = Default/network-WithLossCell/_backbone-AlexNet/conv2-Conv2d/conv2.bias
|
||||
slot = 0
|
||||
iteration = 2
|
||||
device_id = None
|
||||
root_graph_id = 0
|
||||
is_parameter = True
|
||||
|
||||
tensor_data_1 attributes:
|
||||
data (printed in uint8) = [170 19 44 181 254 212 16 52 52 162 148 180 130 115 226 180 183 243
|
||||
101 52 224 79 189 51 10 70 69 51 199 75 159 52 79 98 104 52
|
||||
106 77 19 52 129 183 8 180 252 58 48 180 35 219 9 52 240 201
|
||||
179 51 142 151 158 51 210 145 182 53 140 219 0 53 140 219 22 181
|
||||
46 33 87 180 238 90 122 180 166 10 38 179 202 195 4 53 166 10
|
||||
150 51 214 120 209 52 235 115 37 180 92 177 215 180 0 136 84 51
|
||||
72 114 145 180 43 169 255 180 114 27 61 52 76 225 122 50 126 72
|
||||
159 51 58 35 202 51 114 61 106 51 60 223 63 52 209 179 1 52
|
||||
232 217 44 178 130 158 109 179 213 231 10 179 37 40 94 179 208 68
|
||||
64 53 6 52 249 52 162 35 1 181 231 29 155 52 30 201 69 180
|
||||
229 131 126 51 18 165 109 180 164 112 163 181 116 172 11 178 6 129
|
||||
37 52 54 205 203 180 115 104 145 52 232 106 219 179 36 40 214 52
|
||||
202 50 204 52 76 89 38 179 230 140 232 178 168 53 77 52 180 191
|
||||
108 51 128 183 64 51 56 137 161 180 247 6 143 180 126 63 197 180
|
||||
198 177 94 52 140 185 139 51 150 178 228 180 255 67 150 52 134 201
|
||||
164 52 107 43 14 53 174 216 63 179 40 160 41 53 120 88 72 179
|
||||
218 172 234 52 234 38 25 52 85 159 155 180 254 67 138 180 34 253
|
||||
118 180 218 61 17 52 242 133 253 52 175 37 180 52 171 62 163 52
|
||||
202 195 86 53 160 171 45 52 34 31 176 180 156 85 5 53 178 191
|
||||
68 180 42 203 140 52 248 117 72 52 248 253 212 176 195 100 202 51
|
||||
87 14 141 52 91 100 235 51 48 221 136 52 143 117 17 180 51 196
|
||||
25 52 127 29 112 180 152 144 207 178 219 104 64 52 21 174 251 52
|
||||
164 78 138 181 20 63 6 52 10 249 96 179 163 146 18 53 200 186
|
||||
236 52 2 188 85 52 124 140 121 179 246 185 22 181 246 74 249 51
|
||||
70 182 135 53 189 227 76 52 249 160 159 180 134 235 65 53 64 164
|
||||
255 51 224 156 41 53 142 117 69 181 247 151 101 53 185 175 35 52
|
||||
164 112 21 53 30 31 212 179 142 151 110 179 176 148 29 181 206 204
|
||||
88 53 116 215 214 180 172 173 216 51 106 222 153 180 200 152 19 181
|
||||
176 3 7 52 215 52 87 52]
|
||||
size in bytes = 512
|
||||
debugger dtype = 11
|
||||
shape = [128]
|
||||
-----------------------------------------------------------
|
||||
tensor_info_2 attributes:
|
||||
node name = Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op168
|
||||
slot = 0
|
||||
iteration = 2
|
||||
device_id = None
|
||||
root_graph_id = 0
|
||||
is_parameter = False
|
||||
|
||||
tensor_data_2 attributes:
|
||||
data (printed in uint8) = [181 167 46 26 122 155 141 164 212 39 111 27 247 156 1 152 189 36
|
||||
15 161 254 167 82 163 33 42 101 158 225 161 24 167 103 140 45 42
|
||||
178 170 173 29 48 42 39 32 56 25 216 170 128 41 216 23 153 154
|
||||
39 173 193 42 84 160 111 22 61 144]
|
||||
size in bytes = 64
|
||||
debugger dtype = 10
|
||||
shape = [2, 2, 2, 2, 2]
|
||||
-----------------------------------------------------------
|
||||
tensor_info_3 attributes:
|
||||
node name = Default/network-WithLossCell/_backbone-AlexNet/ReLUV2-op346
|
||||
slot = 1
|
||||
iteration = 2
|
||||
device_id = None
|
||||
root_graph_id = 0
|
||||
is_parameter = False
|
||||
|
||||
tensor_data_3 attributes:
|
||||
data (printed in uint8) = [ 50 17 122 ... 94 42 90]
|
||||
size in bytes = 129792
|
||||
debugger dtype = 6
|
||||
shape = [32, 12, 13, 13, 2]
|
|
@ -0,0 +1,33 @@
|
|||
-----------------------------------------------------------
|
||||
watchpoint_hit for test_1 attributes:
|
||||
name = Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op168
|
||||
slot = 0
|
||||
condition = 6
|
||||
watchpoint_id = 1
|
||||
parameter 0 name = param
|
||||
parameter 0 disabled = False
|
||||
parameter 0 value = 0.0
|
||||
parameter 0 hit = True
|
||||
parameter 0 actual_value = -0.08050537109375
|
||||
error code = 0
|
||||
device_id = 0
|
||||
root_graph_id = 0
|
||||
-----------------------------------------------------------
|
||||
watchpoint_hit for test_4 attributes:
|
||||
name = Default/network-WithLossCell/_backbone-AlexNet/fc3-Dense/Parameter[6]_11/fc3.bias
|
||||
slot = 0
|
||||
condition = 18
|
||||
watchpoint_id = 3
|
||||
parameter 0 name = abs_mean_update_ratio_gt
|
||||
parameter 0 disabled = False
|
||||
parameter 0 value = 0.0
|
||||
parameter 0 hit = True
|
||||
parameter 0 actual_value = 0.5243796973599475
|
||||
parameter 1 name = epsilon
|
||||
parameter 1 disabled = True
|
||||
parameter 1 value = 0.0
|
||||
parameter 1 hit = False
|
||||
parameter 1 actual_value = 0.0
|
||||
error code = 0
|
||||
device_id = 0
|
||||
root_graph_id = 0
|
|
@ -0,0 +1,98 @@
|
|||
-----------------------------------------------------------
|
||||
tensor_info_1 attributes:
|
||||
node name = Default/network-WithLossCell/_backbone-AlexNet/conv2-Conv2d/conv2.bias
|
||||
slot = 0
|
||||
iteration = 2
|
||||
device_id = None
|
||||
root_graph_id = 0
|
||||
is_parameter = True
|
||||
|
||||
tensor_data_1 attributes:
|
||||
data (printed in uint8) = [230 208 10 52 104 34 252 52 4 231 144 52 188 150 64 180 88 236
|
||||
15 180 254 135 180 51 131 226 147 52 88 202 62 53 2 43 55 53
|
||||
231 29 87 180 220 249 30 180 157 17 177 180 81 107 140 181 8 95
|
||||
192 180 89 134 112 180 96 238 90 178 156 196 212 180 206 25 15 181
|
||||
212 154 6 180 91 211 116 52 191 14 140 51 128 106 124 53 28 158
|
||||
70 181 182 21 251 50 100 204 157 179 88 202 42 180 7 95 8 53
|
||||
128 251 238 52 241 133 241 52 111 86 157 179 48 221 148 180 200 7
|
||||
141 180 236 226 182 51 190 82 158 180 140 108 179 180 195 134 215 179
|
||||
103 213 39 179 89 168 149 180 42 58 58 180 64 53 62 179 250 126
|
||||
158 52 38 83 117 52 0 0 136 180 136 133 122 51 110 18 131 179
|
||||
238 13 94 51 102 136 15 181 134 90 227 180 16 11 117 180 35 74
|
||||
163 52 105 0 87 181 112 18 131 50 226 233 67 181 217 172 10 52
|
||||
206 25 217 52 208 213 22 52 146 203 87 180 74 46 207 52 178 191
|
||||
4 180 100 93 216 52 119 190 171 180 223 2 5 181 128 72 207 179
|
||||
58 146 11 179 224 79 137 52 143 228 154 180 246 219 215 179 14 79
|
||||
195 52 126 29 64 52 132 192 42 51 94 220 86 52 94 109 1 181
|
||||
72 37 117 178 110 197 94 180 160 94 153 179 118 224 80 181 156 17
|
||||
37 50 120 156 162 53 26 115 135 180 228 20 29 53 145 126 147 52
|
||||
99 16 48 180 211 188 199 180 52 51 99 180 93 254 227 52 152 126
|
||||
123 49 6 18 16 181 5 163 130 51 27 158 98 53 134 235 189 52
|
||||
119 45 9 180 130 115 110 52 158 128 162 52 232 251 197 180 178 46
|
||||
158 179 57 214 157 52 172 207 161 180 208 0 222 49 242 99 32 53
|
||||
20 174 135 50 247 117 176 52 194 57 43 180 140 108 135 51 243 65
|
||||
175 51 187 73 156 51 63 232 217 50 180 234 115 52 194 168 148 52
|
||||
27 192 183 180 45 178 157 52 125 208 17 53 236 192 65 53 190 193
|
||||
7 53 254 246 57 53 3 43 199 51 64 164 215 180 220 104 240 51
|
||||
23 72 24 180 68 173 9 51 72 114 29 53 105 0 57 181 188 150
|
||||
8 53 229 97 131 53 0 34 189 51 163 146 74 53 31 244 204 51
|
||||
86 193 220 180 156 51 146 179]
|
||||
size in bytes = 512
|
||||
debugger dtype = 11
|
||||
shape = [128]
|
||||
-----------------------------------------------------------
|
||||
tensor_info_2 attributes:
|
||||
node name = Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op171
|
||||
slot = 0
|
||||
iteration = 2
|
||||
device_id = None
|
||||
root_graph_id = 0
|
||||
is_parameter = False
|
||||
|
||||
tensor_data_2 attributes:
|
||||
data (printed in uint8) = [ 99 26 69 41 190 38 128 38 232 38 16 39 5 39 24 39 1 39
|
||||
218 38 219 38 43 39 241 33 21 165 159 32 15 145 191 28 66 30
|
||||
110 30 149 31 14 29 179 29 249 28 94 29 141 156 210 36 143 166
|
||||
201 162 5 165 54 166 100 165 57 165 81 165 25 166 150 165 236 164
|
||||
20 164 238 165 170 20 200 168 16 168 36 169 9 169 195 168 64 168
|
||||
248 168 10 169 20 168 56 167 137 167 124 168 221 152 35 168 163 167
|
||||
110 169 147 168 198 167 52 168 91 168 14 168 30 168 240 167 171 168
|
||||
235 168 37 161 222 165 16 161 88 164 68 162 156 152 109 151 181 156
|
||||
0 152 84 158 112 154 193 161 13 162 172 28 38 163 16 31 255 26
|
||||
102 21 64 31 177 28 102 156 77 20 62 25 177 26 26 22 241 24
|
||||
188 33 149 160 67 36 171 35 38 36 68 34 148 19 54 162 53 161
|
||||
174 156 195 134 139 24 210 35 175 36 206 158 136 37 88 36 31 36
|
||||
78 20 203 159 6 165 235 163 83 162 7 157 76 31 240 35 38 37
|
||||
20 160 193 38 130 29 95 23 177 161 143 162 46 165 103 164 106 163
|
||||
167 162 36 158 130 161 149 33 171 157 138 37 252 27 198 164 116 166
|
||||
60 165 36 165 47 165 150 166 188 166 112 167 58 166 33 140 141 163
|
||||
93 32 38 159 13 168 194 166 78 166 8 166 201 165 115 166 128 166
|
||||
77 166 29 166 131 157 150 31 46 32 124 164 239 166 219 165 96 166
|
||||
216 166 21 167 28 167 35 167 237 165 202 164 57 32 75 26 208 40
|
||||
148 40 205 40 162 40 187 40 181 40 181 40 155 40 124 40 129 40
|
||||
157 40 186 29 253 32 138 44 226 43 43 43 237 42 164 42 137 42
|
||||
174 42 179 42 160 42 104 42 30 42 53 38 140 25 240 44 120 44
|
||||
236 42 19 43 143 42 6 42 181 41 83 42 0 43 112 42 97 41
|
||||
27 32 177 32 254 44 105 43 242 40 239 40 71 41 223 40 237 40
|
||||
93 41 22 41 211 40 227 40 187 20 71 30 4 44 188 40 79 36
|
||||
133 38 62 39 209 38 15 38 83 38 136 38 146 38 100 37 118 152
|
||||
185 149 165 42 99 41 61 36 241 37 34 38 170 38 62 38 69 39
|
||||
215 39 128 39 49 38 54 33 141 161 184 41 34 40 100 36 230 37
|
||||
133 38 57 37 224 35 7 37]
|
||||
size in bytes = 512
|
||||
debugger dtype = 10
|
||||
shape = [4, 4, 4, 4]
|
||||
-----------------------------------------------------------
|
||||
tensor_info_3 attributes:
|
||||
node name = Default/network-WithLossCell/_backbone-AlexNet/ReLUV2-op353
|
||||
slot = 1
|
||||
iteration = 2
|
||||
device_id = None
|
||||
root_graph_id = 0
|
||||
is_parameter = False
|
||||
|
||||
tensor_data_3 attributes:
|
||||
data (printed in uint8) = [19 17 27 ... 94 42 90]
|
||||
size in bytes = 129792
|
||||
debugger dtype = 6
|
||||
shape = [32, 12, 13, 13, 2]
|
|
@ -0,0 +1,83 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ==============================================================================
|
||||
"""
|
||||
Read tensor test script for offline debugger APIs.
|
||||
"""
|
||||
|
||||
import mindspore.offline_debug.dbg_services as d
|
||||
import numpy as np
|
||||
import pytest
|
||||
from dump_test_utils import compare_actual_with_expected
|
||||
|
||||
GENERATE_GOLDEN = False
|
||||
test_name = "async_sink_mode_true_read_tensors"
|
||||
|
||||
|
||||
@pytest.mark.level0
|
||||
@pytest.mark.platform_arm_ascend_training
|
||||
@pytest.mark.platform_x86_ascend_training
|
||||
@pytest.mark.env_onecard
|
||||
def test_async_sink_mode_true_read_tensors():
|
||||
debugger_backend = d.DbgServices(
|
||||
dump_file_path="/home/workspace/mindspore_dataset/dumps/async_sink_true/")
|
||||
|
||||
_ = debugger_backend.initialize(net_name="alexnet", is_sync_mode=False)
|
||||
|
||||
# output tensor with zero slot
|
||||
info1 = d.TensorInfo(node_name="Default/network-TrainOneStepCell/network-WithLossCell/_backbone-AlexNet/"
|
||||
"conv3-Conv2d/Conv2D-op169",
|
||||
slot=0, iteration=2, device_id=0, root_graph_id=1, is_parameter=False)
|
||||
# output tensor with non-zero slot
|
||||
info2 = d.TensorInfo(node_name="Default/network-TrainOneStepCell/network-WithLossCell/_backbone-AlexNet/"
|
||||
"ReLUV2-op348",
|
||||
slot=1, iteration=2, device_id=0, root_graph_id=1, is_parameter=False)
|
||||
|
||||
tensor_info = [info1, info2]
|
||||
|
||||
tensor_data = debugger_backend.read_tensors(tensor_info)
|
||||
|
||||
print_read_tensors(tensor_info, tensor_data)
|
||||
if not GENERATE_GOLDEN:
|
||||
assert compare_actual_with_expected(test_name)
|
||||
|
||||
|
||||
def print_read_tensors(tensor_info, tensor_data):
|
||||
"""Print read tensors."""
|
||||
if GENERATE_GOLDEN:
|
||||
f_write = open(test_name + ".expected", "w")
|
||||
else:
|
||||
f_write = open(test_name + ".actual", "w")
|
||||
for x, _ in enumerate(tensor_info):
|
||||
f_write.write("-----------------------------------------------------------\n")
|
||||
f_write.write("tensor_info_" + str(x + 1) + " attributes:\n")
|
||||
f_write.write("node name = " + tensor_info[x].node_name + "\n")
|
||||
f_write.write("slot = " + str(tensor_info[x].slot) + "\n")
|
||||
f_write.write("iteration = " + str(tensor_info[x].iteration) + "\n")
|
||||
f_write.write("device_id = " + str(tensor_info[x].device_id) + "\n")
|
||||
f_write.write("root_graph_id = " + str(tensor_info[x].root_graph_id) + "\n")
|
||||
f_write.write("is_parameter = " + str(tensor_info[x].is_parameter) + "\n")
|
||||
f_write.write("\n")
|
||||
f_write.write("tensor_data_" + str(x + 1) + " attributes:\n")
|
||||
f_write.write("data (printed in uint8) = " + str(np.frombuffer(
|
||||
tensor_data[x].data_ptr, np.uint8, tensor_data[x].data_size)) + "\n")
|
||||
py_byte_size = len(tensor_data[x].data_ptr)
|
||||
c_byte_size = tensor_data[x].data_size
|
||||
if c_byte_size != py_byte_size:
|
||||
f_write.write("The python byte size of " + str(py_byte_size) +
|
||||
" does not match the C++ byte size of " + str(c_byte_size) + "\n")
|
||||
f_write.write("size in bytes = " + str(tensor_data[x].data_size) + "\n")
|
||||
f_write.write("debugger dtype = " + str(tensor_data[x].dtype) + "\n")
|
||||
f_write.write("shape = " + str(tensor_data[x].shape) + "\n")
|
||||
f_write.close()
|
|
@ -0,0 +1,106 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ==============================================================================
|
||||
"""
|
||||
Watchpoints test script for offline debugger APIs.
|
||||
"""
|
||||
|
||||
import mindspore.offline_debug.dbg_services as d
|
||||
import pytest
|
||||
from dump_test_utils import compare_actual_with_expected
|
||||
|
||||
GENERATE_GOLDEN = False
|
||||
test_name = "async_sink_mode_true_watchpoints"
|
||||
|
||||
|
||||
@pytest.mark.level0
|
||||
@pytest.mark.platform_arm_ascend_training
|
||||
@pytest.mark.platform_x86_ascend_training
|
||||
@pytest.mark.env_onecard
|
||||
def test_async_sink_mode_true_watchpoints():
|
||||
if GENERATE_GOLDEN:
|
||||
f_write = open(test_name + ".expected", "w")
|
||||
else:
|
||||
f_write = open(test_name + ".actual", "w")
|
||||
|
||||
debugger_backend = d.DbgServices(
|
||||
dump_file_path="/home/workspace/mindspore_dataset/dumps/async_sink_true/")
|
||||
|
||||
_ = debugger_backend.initialize(net_name="alexnet", is_sync_mode=False)
|
||||
|
||||
# NOTES:
|
||||
# -> watch_condition=6 is MIN_LT
|
||||
# -> watch_condition=18 is CHANGE_TOO_LARGE
|
||||
|
||||
# test 1: watchpoint set and hit (watch_condition=6)
|
||||
param1 = d.Parameter(name="param", disabled=False, value=0.0)
|
||||
_ = debugger_backend.add_watchpoint(watchpoint_id=1, watch_condition=6,
|
||||
check_node_list={"Default/network-TrainOneStepCell/network-WithLossCell/"
|
||||
"_backbone-AlexNet/conv3-Conv2d/Conv2D-op169":
|
||||
{"device_id": [0], "root_graph_id": [1],
|
||||
"is_parameter": False
|
||||
}}, parameter_list=[param1])
|
||||
|
||||
watchpoint_hits_test_1 = debugger_backend.check_watchpoints(iteration=2)
|
||||
if len(watchpoint_hits_test_1) != 1:
|
||||
f_write.write("ERROR -> test 1: watchpoint set but not hit just once\n")
|
||||
print_watchpoint_hits(watchpoint_hits_test_1, 1, f_write)
|
||||
|
||||
# test 2: watchpoint remove and ensure it's not hit
|
||||
_ = debugger_backend.remove_watchpoint(watchpoint_id=1)
|
||||
watchpoint_hits_test_2 = debugger_backend.check_watchpoints(iteration=2)
|
||||
if watchpoint_hits_test_2:
|
||||
f_write.write("ERROR -> test 2: watchpoint removed but hit\n")
|
||||
|
||||
# test 3: watchpoint set and not hit, then remove
|
||||
param2 = d.Parameter(name="param", disabled=False, value=-1000.0)
|
||||
_ = debugger_backend.add_watchpoint(watchpoint_id=2, watch_condition=6,
|
||||
check_node_list={"Default/network-TrainOneStepCell/network-WithLossCell/"
|
||||
"_backbone-AlexNet/conv3-Conv2d/Conv2D-op169":
|
||||
{"device_id": [0], "root_graph_id": [1],
|
||||
"is_parameter": False
|
||||
}}, parameter_list=[param2])
|
||||
|
||||
watchpoint_hits_test_3 = debugger_backend.check_watchpoints(iteration=2)
|
||||
if watchpoint_hits_test_3:
|
||||
f_write.write("ERROR -> test 3: watchpoint set but not supposed to be hit\n")
|
||||
_ = debugger_backend.remove_watchpoint(watchpoint_id=2)
|
||||
f_write.close()
|
||||
if not GENERATE_GOLDEN:
|
||||
assert compare_actual_with_expected(test_name)
|
||||
|
||||
|
||||
def print_watchpoint_hits(watchpoint_hits, test_id, f_write):
|
||||
"""Print watchpoint hits."""
|
||||
for x, _ in enumerate(watchpoint_hits):
|
||||
f_write.write("-----------------------------------------------------------\n")
|
||||
f_write.write("watchpoint_hit for test_%u attributes:" % test_id + "\n")
|
||||
f_write.write("name = " + watchpoint_hits[x].name + "\n")
|
||||
f_write.write("slot = " + str(watchpoint_hits[x].slot) + "\n")
|
||||
f_write.write("condition = " + str(watchpoint_hits[x].condition) + "\n")
|
||||
f_write.write("watchpoint_id = " + str(watchpoint_hits[x].watchpoint_id) + "\n")
|
||||
for p, _ in enumerate(watchpoint_hits[x].parameters):
|
||||
f_write.write("parameter " + str(p) + " name = " +
|
||||
watchpoint_hits[x].parameters[p].name + "\n")
|
||||
f_write.write("parameter " + str(p) + " disabled = " +
|
||||
str(watchpoint_hits[x].parameters[p].disabled) + "\n")
|
||||
f_write.write("parameter " + str(p) + " value = " +
|
||||
str(watchpoint_hits[x].parameters[p].value) + "\n")
|
||||
f_write.write("parameter " + str(p) + " hit = " +
|
||||
str(watchpoint_hits[x].parameters[p].hit) + "\n")
|
||||
f_write.write("parameter " + str(p) + " actual_value = " +
|
||||
str(watchpoint_hits[x].parameters[p].actual_value) + "\n")
|
||||
f_write.write("error code = " + str(watchpoint_hits[x].error_code) + "\n")
|
||||
f_write.write("device_id = " + str(watchpoint_hits[x].device_id) + "\n")
|
||||
f_write.write("root_graph_id = " + str(watchpoint_hits[x].root_graph_id) + "\n")
|
|
@ -0,0 +1,85 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ==============================================================================
|
||||
"""
|
||||
Read tensor test script for offline debugger APIs.
|
||||
"""
|
||||
|
||||
import mindspore.offline_debug.dbg_services as d
|
||||
import numpy as np
|
||||
import pytest
|
||||
from dump_test_utils import compare_actual_with_expected
|
||||
|
||||
GENERATE_GOLDEN = False
|
||||
test_name = "sync_trans_false_read_tensors"
|
||||
|
||||
|
||||
@pytest.mark.level0
|
||||
@pytest.mark.platform_arm_ascend_training
|
||||
@pytest.mark.platform_x86_ascend_training
|
||||
@pytest.mark.env_onecard
|
||||
def test_sync_trans_false_read_tensors():
|
||||
debugger_backend = d.DbgServices(
|
||||
dump_file_path="/home/workspace/mindspore_dataset/dumps/sync_trans_false/alexnet/")
|
||||
|
||||
_ = debugger_backend.initialize(
|
||||
net_name="Network Name goes here!", is_sync_mode=True)
|
||||
|
||||
# parameter
|
||||
info1 = d.TensorInfo(node_name="Default/network-WithLossCell/_backbone-AlexNet/conv2-Conv2d/conv2.bias",
|
||||
slot=0, iteration=2, device_id=0, root_graph_id=0, is_parameter=True)
|
||||
# output tensor with zero slot
|
||||
info2 = d.TensorInfo(node_name="Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op168",
|
||||
slot=0, iteration=2, device_id=0, root_graph_id=0, is_parameter=False)
|
||||
# output tensor with non-zero slot
|
||||
info3 = d.TensorInfo(node_name="Default/network-WithLossCell/_backbone-AlexNet/ReLUV2-op346",
|
||||
slot=1, iteration=2, device_id=0, root_graph_id=0, is_parameter=False)
|
||||
|
||||
tensor_info = [info1, info2, info3]
|
||||
|
||||
tensor_data = debugger_backend.read_tensors(tensor_info)
|
||||
|
||||
print_read_tensors(tensor_info, tensor_data)
|
||||
if not GENERATE_GOLDEN:
|
||||
assert compare_actual_with_expected(test_name)
|
||||
|
||||
|
||||
def print_read_tensors(tensor_info, tensor_data):
    """Write the attributes of each requested tensor and its returned data.

    Args:
        tensor_info (list): TensorInfo entries that were requested.
        tensor_data (list): matching results from DbgServices.read_tensors().

    Output goes to "<test_name>.expected" when GENERATE_GOLDEN is set,
    otherwise to "<test_name>.actual" for comparison with the golden file.
    """
    suffix = ".expected" if GENERATE_GOLDEN else ".actual"
    # Context manager guarantees the file is closed even if a write raises;
    # the previous version leaked the handle on any exception.
    with open(test_name + suffix, "w") as f_write:
        for x, (info, data) in enumerate(zip(tensor_info, tensor_data)):
            f_write.write("-----------------------------------------------------------\n")
            f_write.write("tensor_info_" + str(x + 1) + " attributes:\n")
            f_write.write("node name = " + info.node_name + "\n")
            f_write.write("slot = " + str(info.slot) + "\n")
            f_write.write("iteration = " + str(info.iteration) + "\n")
            f_write.write("device_id = " + str(info.device_id) + "\n")
            f_write.write("root_graph_id = " + str(info.root_graph_id) + "\n")
            f_write.write("is_parameter = " + str(info.is_parameter) + "\n")
            f_write.write("\n")
            f_write.write("tensor_data_" + str(x + 1) + " attributes:\n")
            f_write.write("data (printed in uint8) = " + str(np.frombuffer(
                data.data_ptr, np.uint8, data.data_size)) + "\n")
            # Sanity check: C++-reported byte size vs the Python buffer length.
            py_byte_size = len(data.data_ptr)
            c_byte_size = data.data_size
            if c_byte_size != py_byte_size:
                f_write.write("The python byte size of " + str(py_byte_size) +
                              " does not match the C++ byte size of " + str(c_byte_size) + "\n")
            f_write.write("size in bytes = " + str(data.data_size) + "\n")
            f_write.write("debugger dtype = " + str(data.dtype) + "\n")
            f_write.write("shape = " + str(data.shape) + "\n")
|
|
@ -0,0 +1,124 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ==============================================================================
|
||||
"""
|
||||
Watchpoints test script for offline debugger APIs.
|
||||
"""
|
||||
|
||||
import mindspore.offline_debug.dbg_services as d
|
||||
import pytest
|
||||
from dump_test_utils import compare_actual_with_expected
|
||||
|
||||
# When True the script writes a fresh "<test_name>.expected" golden file
# instead of comparing against it.
GENERATE_GOLDEN = False
# Base name used for the ".expected"/".actual" output files of this test.
test_name = "sync_trans_false_watchpoints"
||||
|
||||
|
||||
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_sync_trans_false_watchpoints():
    """Exercise add/check/remove watchpoint flows on a pre-generated AlexNet dump.

    Four scenarios: set-and-hit (MIN_LT), removed-and-not-hit, set-but-not-hit,
    and a weight-change (CHANGE_TOO_LARGE) hit; the log is compared against a
    golden file unless GENERATE_GOLDEN is set.
    """
    if GENERATE_GOLDEN:
        out_name = test_name + ".expected"
    else:
        out_name = test_name + ".actual"
    # Context manager replaces the manual close(): the log file is now closed
    # even when a debugger call raises part-way through.
    with open(out_name, "w") as f_write:
        debugger_backend = d.DbgServices(
            dump_file_path="/home/workspace/mindspore_dataset/dumps/sync_trans_false/alexnet/")

        _ = debugger_backend.initialize(
            net_name="Network Name goes here!", is_sync_mode=True)

        # NOTES:
        # -> watch_condition=6 is MIN_LT
        # -> watch_condition=18 is CHANGE_TOO_LARGE

        # test 1: watchpoint set and hit (watch_condition=6)
        param1 = d.Parameter(name="param", disabled=False, value=0.0)
        _ = debugger_backend.add_watchpoint(watchpoint_id=1, watch_condition=6,
                                            check_node_list={"Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/"
                                                             "Conv2D-op168":
                                                             {"device_id": [0], "root_graph_id": [0],
                                                              "is_parameter": False
                                                              }}, parameter_list=[param1])

        watchpoint_hits_test_1 = debugger_backend.check_watchpoints(iteration=2)
        if len(watchpoint_hits_test_1) != 1:
            f_write.write("ERROR -> test 1: watchpoint set but not hit just once")
        print_watchpoint_hits(watchpoint_hits_test_1, 1, f_write)

        # test 2: watchpoint remove and ensure it's not hit
        _ = debugger_backend.remove_watchpoint(watchpoint_id=1)
        watchpoint_hits_test_2 = debugger_backend.check_watchpoints(iteration=2)
        if watchpoint_hits_test_2:
            f_write.write("ERROR -> test 2: watchpoint removed but hit")

        # test 3: watchpoint set and not hit, then remove
        param2 = d.Parameter(name="param", disabled=False, value=-1000.0)
        _ = debugger_backend.add_watchpoint(watchpoint_id=2, watch_condition=6,
                                            check_node_list={"Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/"
                                                             "Conv2D-op308":
                                                             {"device_id": [0], "root_graph_id": [0],
                                                              "is_parameter": False
                                                              }}, parameter_list=[param2])

        watchpoint_hits_test_3 = debugger_backend.check_watchpoints(iteration=2)
        if watchpoint_hits_test_3:
            f_write.write("ERROR -> test 3: watchpoint set but not supposed to be hit")
        _ = debugger_backend.remove_watchpoint(watchpoint_id=2)

        # test 4: weight change watchpoint set and hit
        param_abs_mean_update_ratio_gt = d.Parameter(
            name="abs_mean_update_ratio_gt", disabled=False, value=0.0)
        param_epsilon = d.Parameter(name="epsilon", disabled=True, value=0.0)
        _ = debugger_backend.add_watchpoint(watchpoint_id=3, watch_condition=18,
                                            check_node_list={"Default/network-WithLossCell/_backbone-AlexNet/fc3-Dense/"
                                                             "Parameter[6]_11/fc3.bias":
                                                             {"device_id": [0], "root_graph_id": [0],
                                                              "is_parameter": True
                                                              }}, parameter_list=[param_abs_mean_update_ratio_gt,
                                                                                  param_epsilon])

        watchpoint_hits_test_4 = debugger_backend.check_watchpoints(iteration=3)
        if len(watchpoint_hits_test_4) != 1:
            f_write.write("ERROR -> test 4: watchpoint weight change set but not hit just once")
        print_watchpoint_hits(watchpoint_hits_test_4, 4, f_write)
    if not GENERATE_GOLDEN:
        assert compare_actual_with_expected(test_name)
||||
|
||||
|
||||
def print_watchpoint_hits(watchpoint_hits, test_id, f_write):
    """Dump every field of each watchpoint hit, and of its parameters, to f_write."""
    for hit in watchpoint_hits:
        f_write.write("-----------------------------------------------------------\n")
        f_write.write("watchpoint_hit for test_%u attributes:" % test_id + "\n")
        f_write.write("name = " + hit.name + "\n")
        f_write.write("slot = " + str(hit.slot) + "\n")
        f_write.write("condition = " + str(hit.condition) + "\n")
        f_write.write("watchpoint_id = " + str(hit.watchpoint_id) + "\n")
        for p, param in enumerate(hit.parameters):
            prefix = "parameter " + str(p) + " "
            f_write.write(prefix + "name = " + param.name + "\n")
            f_write.write(prefix + "disabled = " + str(param.disabled) + "\n")
            f_write.write(prefix + "value = " + str(param.value) + "\n")
            f_write.write(prefix + "hit = " + str(param.hit) + "\n")
            f_write.write(prefix + "actual_value = " + str(param.actual_value) + "\n")
        f_write.write("error code = " + str(hit.error_code) + "\n")
        f_write.write("device_id = " + str(hit.device_id) + "\n")
        f_write.write("root_graph_id = " + str(hit.root_graph_id) + "\n")
|
|
@ -0,0 +1,85 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ==============================================================================
|
||||
"""
|
||||
Read tensor test script for offline debugger APIs.
|
||||
"""
|
||||
|
||||
import mindspore.offline_debug.dbg_services as d
|
||||
import numpy as np
|
||||
import pytest
|
||||
from dump_test_utils import compare_actual_with_expected
|
||||
|
||||
# When True the script writes a fresh "<test_name>.expected" golden file
# instead of comparing against it.
GENERATE_GOLDEN = False
# Base name used for the ".expected"/".actual" output files of this test.
test_name = "sync_trans_true_read_tensors"
||||
|
||||
|
||||
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_sync_trans_true_read_tensors():
    """Read three tensors from a pre-generated AlexNet dump via the offline
    debugger and compare the printed result with the stored golden file."""
    backend = d.DbgServices(
        dump_file_path="/home/workspace/mindspore_dataset/dumps/sync_trans_true/alexnet/")
    _ = backend.initialize(
        net_name="Network Name goes here!", is_sync_mode=True)

    # One parameter tensor, one zero-slot output and one non-zero-slot output.
    requested = [
        d.TensorInfo(node_name="Default/network-WithLossCell/_backbone-AlexNet/conv2-Conv2d/conv2.bias",
                     slot=0, iteration=2, device_id=0, root_graph_id=0, is_parameter=True),
        d.TensorInfo(node_name="Default/network-WithLossCell/_backbone-AlexNet/conv3-Conv2d/Conv2D-op171",
                     slot=0, iteration=2, device_id=0, root_graph_id=0, is_parameter=False),
        d.TensorInfo(node_name="Default/network-WithLossCell/_backbone-AlexNet/ReLUV2-op353",
                     slot=1, iteration=2, device_id=0, root_graph_id=0, is_parameter=False),
    ]

    fetched = backend.read_tensors(requested)

    print_read_tensors(requested, fetched)
    if not GENERATE_GOLDEN:
        assert compare_actual_with_expected(test_name)
||||
|
||||
|
||||
def print_read_tensors(tensor_info, tensor_data):
    """Write the attributes of each requested tensor and its returned data.

    Args:
        tensor_info (list): TensorInfo entries that were requested.
        tensor_data (list): matching results from DbgServices.read_tensors().

    Output goes to "<test_name>.expected" when GENERATE_GOLDEN is set,
    otherwise to "<test_name>.actual" for comparison with the golden file.
    """
    suffix = ".expected" if GENERATE_GOLDEN else ".actual"
    # Context manager guarantees the file is closed even if a write raises;
    # the previous version leaked the handle on any exception.
    with open(test_name + suffix, "w") as f_write:
        for x, (info, data) in enumerate(zip(tensor_info, tensor_data)):
            f_write.write("-----------------------------------------------------------\n")
            f_write.write("tensor_info_" + str(x + 1) + " attributes:\n")
            f_write.write("node name = " + info.node_name + "\n")
            f_write.write("slot = " + str(info.slot) + "\n")
            f_write.write("iteration = " + str(info.iteration) + "\n")
            f_write.write("device_id = " + str(info.device_id) + "\n")
            f_write.write("root_graph_id = " + str(info.root_graph_id) + "\n")
            f_write.write("is_parameter = " + str(info.is_parameter) + "\n")
            f_write.write("\n")
            f_write.write("tensor_data_" + str(x + 1) + " attributes:\n")
            f_write.write("data (printed in uint8) = " + str(np.frombuffer(
                data.data_ptr, np.uint8, data.data_size)) + "\n")
            # Sanity check: C++-reported byte size vs the Python buffer length.
            py_byte_size = len(data.data_ptr)
            c_byte_size = data.data_size
            if c_byte_size != py_byte_size:
                f_write.write("The python byte size of " + str(py_byte_size) +
                              " does not match the C++ byte size of " + str(c_byte_size) + "\n")
            f_write.write("size in bytes = " + str(data.data_size) + "\n")
            f_write.write("debugger dtype = " + str(data.dtype) + "\n")
            f_write.write("shape = " + str(data.shape) + "\n")
|
Loading…
Reference in New Issue