From 23ace9029e97fa33d6554696816bc66df71f628d Mon Sep 17 00:00:00 2001
From: wangshuide2020
Date: Fri, 21 May 2021 16:40:36 +0800
Subject: [PATCH] add default value for optional attribute.

---
 mindspore/ops/_op_impl/tbe/basic_lstm_cell_c_state_grad_v2.py | 4 ++--
 mindspore/ops/_op_impl/tbe/bounding_box_decode.py             | 4 ++--
 mindspore/ops/_op_impl/tbe/dynamic_rnn.py                     | 1 +
 mindspore/ops/_op_impl/tbe/logsoftmax_grad.py                 | 2 +-
 mindspore/ops/_op_impl/tbe/space_to_depth.py                  | 2 +-
 mindspore/ops/_op_impl/tbe/strided_slice_d.py                 | 2 +-
 mindspore/ops/_op_impl/tbe/unpack.py                          | 2 +-
 7 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/mindspore/ops/_op_impl/tbe/basic_lstm_cell_c_state_grad_v2.py b/mindspore/ops/_op_impl/tbe/basic_lstm_cell_c_state_grad_v2.py
index 6e54b077d21..433c44b70cd 100644
--- a/mindspore/ops/_op_impl/tbe/basic_lstm_cell_c_state_grad_v2.py
+++ b/mindspore/ops/_op_impl/tbe/basic_lstm_cell_c_state_grad_v2.py
@@ -22,8 +22,8 @@ basic_lstm_cell_c_state_grad_op_info_v2 = TBERegOp("BasicLSTMCellCStateGradV2")
     .binfile_name("basic_lstm_cell_c_state_grad.so") \
     .compute_cost(10) \
     .kernel_name("basic_lstm_cell_c_state_grad_v2") \
-    .attr("forget_bias", "optional", "float", "all") \
-    .attr("activation", "optional", "str", "all") \
+    .attr("forget_bias", "optional", "float", "all", "1") \
+    .attr("activation", "optional", "str", "all", "None") \
     .partial_flag(True) \
     .input(0, "c", False, "required", "all") \
     .input(1, "dy", False, "required", "all") \
diff --git a/mindspore/ops/_op_impl/tbe/bounding_box_decode.py b/mindspore/ops/_op_impl/tbe/bounding_box_decode.py
index e3f2f5b057f..0e9dee7527e 100644
--- a/mindspore/ops/_op_impl/tbe/bounding_box_decode.py
+++ b/mindspore/ops/_op_impl/tbe/bounding_box_decode.py
@@ -25,8 +25,8 @@ bounding_box_decode_op_info = TBERegOp("BoundingBoxDecode") \
     .partial_flag(True) \
     .attr("means", "optional", "listFloat", "all") \
     .attr("stds", "optional", "listFloat", "all") \
-    .attr("max_shape", "optional", "listInt", "all") \
-    .attr("wh_ratio_clip", "optional", "float", "all") \
+    .attr("max_shape", "optional", "listInt", "all", "None") \
+    .attr("wh_ratio_clip", "optional", "float", "all", "0.016") \
     .input(0, "rois", False, "required", "all") \
     .input(1, "deltas", False, "required", "all") \
     .output(0, "bboxes", False, "required", "all") \
diff --git a/mindspore/ops/_op_impl/tbe/dynamic_rnn.py b/mindspore/ops/_op_impl/tbe/dynamic_rnn.py
index da420657b34..b4a45bd6456 100644
--- a/mindspore/ops/_op_impl/tbe/dynamic_rnn.py
+++ b/mindspore/ops/_op_impl/tbe/dynamic_rnn.py
@@ -32,6 +32,7 @@ dynamic_rnn_op_info = TBERegOp("DynamicRNN") \
     .attr("time_major", "optional", "bool", "all", "true") \
     .attr("activation", "optional", "str", "all", "tanh") \
     .attr("forget_bias", "optional", "float", "all", "0") \
+    .attr("gate_order", "optional", "str", "all", "ijfo") \
     .attr("is_training", "optional", "bool", "all", "true") \
     .partial_flag(True) \
     .input(0, "x", False, "required", "all") \
diff --git a/mindspore/ops/_op_impl/tbe/logsoftmax_grad.py b/mindspore/ops/_op_impl/tbe/logsoftmax_grad.py
index 9223b821d5d..56692b15897 100644
--- a/mindspore/ops/_op_impl/tbe/logsoftmax_grad.py
+++ b/mindspore/ops/_op_impl/tbe/logsoftmax_grad.py
@@ -23,7 +23,7 @@ log_softmax_grad_op_info = TBERegOp("LogSoftmaxGrad") \
     .compute_cost(10) \
     .kernel_name("log_softmax_grad") \
     .partial_flag(True) \
-    .attr("axis", "optional", "listInt", "all") \
+    .attr("axis", "optional", "listInt", "all", "-1") \
     .input(0, "x", False, "required", "all") \
     .input(1, "grad", False, "required", "all") \
     .output(0, "y", False, "required", "all") \
diff --git a/mindspore/ops/_op_impl/tbe/space_to_depth.py b/mindspore/ops/_op_impl/tbe/space_to_depth.py
index 2b22a65e4cd..5aafd5c953f 100644
--- a/mindspore/ops/_op_impl/tbe/space_to_depth.py
+++ b/mindspore/ops/_op_impl/tbe/space_to_depth.py
@@ -24,7 +24,7 @@ space_to_depth_op_info = TBERegOp("SpaceToDepth") \
     .kernel_name("space_to_depth") \
     .partial_flag(True) \
     .attr("block_size", "required", "int", "all") \
-    .attr("data_format", "optional", "str", "all") \
+    .attr("data_format", "optional", "str", "all", "NHWC") \
     .input(0, "x", False, "required", "all") \
     .input(1, "filter", False, "optional", "all") \
     .output(0, "y", False, "required", "all") \
diff --git a/mindspore/ops/_op_impl/tbe/strided_slice_d.py b/mindspore/ops/_op_impl/tbe/strided_slice_d.py
index 73217f344b5..2db94987093 100644
--- a/mindspore/ops/_op_impl/tbe/strided_slice_d.py
+++ b/mindspore/ops/_op_impl/tbe/strided_slice_d.py
@@ -25,7 +25,7 @@ strided_slice_d_op_info = TBERegOp("StridedSlice") \
     .partial_flag(True) \
     .attr("begin", "optional", "listInt", "all") \
     .attr("end", "optional", "listInt", "all") \
-    .attr("strides", "optional", "listInt", "all") \
+    .attr("strides", "optional", "listInt", "all", "None") \
     .attr("begin_mask", "required", "int", "all") \
     .attr("end_mask", "required", "int", "all") \
     .attr("ellipsis_mask", "required", "int", "all") \
diff --git a/mindspore/ops/_op_impl/tbe/unpack.py b/mindspore/ops/_op_impl/tbe/unpack.py
index a66554710c7..4cf4a316805 100644
--- a/mindspore/ops/_op_impl/tbe/unpack.py
+++ b/mindspore/ops/_op_impl/tbe/unpack.py
@@ -23,7 +23,7 @@ unstack_op_info = TBERegOp("Unstack") \
     .compute_cost(10) \
     .kernel_name("unpack") \
     .partial_flag(True) \
-    .attr("num", "optional", "int", "all") \
+    .attr("num", "optional", "int", "all", "None") \
     .attr("axis", "required", "int", "all") \
     .input(0, "x", False, "required", "all") \
    .output(0, "y", False, "dynamic", "all") \