From 7dfa94c48308761ce9b50d1844e471fba8905c9c Mon Sep 17 00:00:00 2001
From: andy_wangrui
Date: Tue, 5 Jul 2022 09:55:15 +0800
Subject: [PATCH] add jjf 6c8dc54bceb46250fe47653d28d89872dc3dc34f

---
 mindspore/python/mindspore/ops/_op_impl/tbe/batchnorm.py | 9 +++++----
 .../python/mindspore/ops/_op_impl/tbe/batchnorm_grad.py  | 9 +++++----
 tests/st/ops/ascend/test_one_hot.py                      | 4 ++--
 3 files changed, 12 insertions(+), 10 deletions(-)

diff --git a/mindspore/python/mindspore/ops/_op_impl/tbe/batchnorm.py b/mindspore/python/mindspore/ops/_op_impl/tbe/batchnorm.py
index ecbf979f3be..5de71f1dda3 100644
--- a/mindspore/python/mindspore/ops/_op_impl/tbe/batchnorm.py
+++ b/mindspore/python/mindspore/ops/_op_impl/tbe/batchnorm.py
@@ -36,18 +36,19 @@ batch_norm_op_info = TBERegOp("BatchNorm") \
     .output(2, "batch_variance", False, "required", "all") \
     .output(3, "reserve_space_1", False, "optional", "all") \
     .output(4, "reserve_space_2", False, "optional", "all") \
+    .output(5, "reserve_space_3", False, "optional", "all") \
     .dtype_format(DataType.F16_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default,
                   DataType.F32_Default, DataType.F16_Default, DataType.F32_Default, DataType.F32_Default,
-                  DataType.F32_Default, DataType.F32_Default) \
+                  DataType.F32_Default, DataType.F32_Default, DataType.F32_Default) \
     .dtype_format(DataType.F16_5HD, DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD,
                   DataType.F32_5HD, DataType.F16_5HD, DataType.F32_5HD, DataType.F32_5HD,
-                  DataType.F32_5HD, DataType.F32_5HD) \
+                  DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD) \
     .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default,
                   DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default,
-                  DataType.F32_Default, DataType.F32_Default) \
+                  DataType.F32_Default, DataType.F32_Default, DataType.F32_Default) \
     .dtype_format(DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD,
                   DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD,
-                  DataType.F32_5HD, DataType.F32_5HD) \
+                  DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD) \
     .get_op_info()
 
 
diff --git a/mindspore/python/mindspore/ops/_op_impl/tbe/batchnorm_grad.py b/mindspore/python/mindspore/ops/_op_impl/tbe/batchnorm_grad.py
index bad8e050090..b17b73da8d8 100644
--- a/mindspore/python/mindspore/ops/_op_impl/tbe/batchnorm_grad.py
+++ b/mindspore/python/mindspore/ops/_op_impl/tbe/batchnorm_grad.py
@@ -31,22 +31,23 @@ batch_norm_grad_op_info = TBERegOp("BatchNormGrad") \
     .input(2, "scale", False, "required", "all") \
     .input(3, "reserve_space_1", False, "required", "all") \
     .input(4, "reserve_space_2", False, "required", "all") \
+    .input(5, "reserve_space_3", False, "required", "all") \
     .output(0, "x_backprop", False, "required", "all") \
     .output(1, "scale_backprop", False, "required", "all") \
     .output(2, "offset_backprop", False, "required", "all") \
     .output(3, "reserve_space_4", False, "optional", "all") \
     .output(4, "reserve_space_5", False, "optional", "all") \
     .dtype_format(DataType.F16_Default, DataType.F16_Default, DataType.F32_Default, DataType.F32_Default,
-                  DataType.F32_Default, DataType.F16_Default, DataType.F32_Default,
+                  DataType.F32_Default, DataType.F32_Default, DataType.F16_Default, DataType.F32_Default,
                   DataType.F32_Default, DataType.F32_Default, DataType.F32_Default) \
     .dtype_format(DataType.F16_5HD, DataType.F16_5HD, DataType.F32_5HD, DataType.F32_5HD,
-                  DataType.F32_5HD, DataType.F16_5HD, DataType.F32_5HD,
+                  DataType.F32_5HD, DataType.F32_5HD, DataType.F16_5HD, DataType.F32_5HD,
                   DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD) \
     .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default,
-                  DataType.F32_Default, DataType.F32_Default, DataType.F32_Default,
+                  DataType.F32_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default,
                   DataType.F32_Default, DataType.F32_Default, DataType.F32_Default) \
     .dtype_format(DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD,
-                  DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD,
+                  DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD,
                   DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD) \
     .get_op_info()
 
diff --git a/tests/st/ops/ascend/test_one_hot.py b/tests/st/ops/ascend/test_one_hot.py
index 57f277e6019..882a7faf1da 100644
--- a/tests/st/ops/ascend/test_one_hot.py
+++ b/tests/st/ops/ascend/test_one_hot.py
@@ -128,7 +128,7 @@ def one_hot_static_shape_all_types():
     one_hot_static_shape_test_case(np.int32, mstype.float32, np.float32)
 
 
-@pytest.mark.level1
+@pytest.mark.level0
 @pytest.mark.platform_arm_ascend_training
 @pytest.mark.platform_x86_ascend_training
 @pytest.mark.env_onecard
@@ -143,7 +143,7 @@ def test_ascend_graph_mode():
     one_hot_dynamic_shape_test_case(np.int32, mstype.float32, np.float32)
 
 
-@pytest.mark.level1
+@pytest.mark.level0
 @pytest.mark.platform_arm_ascend_training
 @pytest.mark.platform_x86_ascend_training
 @pytest.mark.env_onecard