From 7d1f3d71ba431aedc7dbd053ac4618cebe2d6cd5 Mon Sep 17 00:00:00 2001 From: Margaret_wangrui Date: Thu, 6 Jan 2022 20:15:56 +0800 Subject: [PATCH] Optimize error message --- mindspore/ccsrc/frontend/operator/composite/composite.cc | 2 +- .../ccsrc/frontend/operator/composite/do_signature.cc | 2 +- mindspore/ccsrc/frontend/operator/composite/map.cc | 4 ++-- .../ccsrc/frontend/operator/ops_front_infer_function.cc | 7 ++++--- tests/syntax/simple_expression/test_map.py | 4 ++-- tests/syntax/simple_expression/test_operator.py | 2 +- tests/ut/python/optimizer/test_auto_grad.py | 8 +------- 7 files changed, 12 insertions(+), 17 deletions(-) diff --git a/mindspore/ccsrc/frontend/operator/composite/composite.cc b/mindspore/ccsrc/frontend/operator/composite/composite.cc index d470716f358..cb7bf6487a5 100644 --- a/mindspore/ccsrc/frontend/operator/composite/composite.cc +++ b/mindspore/ccsrc/frontend/operator/composite/composite.cc @@ -769,7 +769,7 @@ FuncGraphPtr GradOperation::GenerateFuncGraph(const AbstractBasePtrList &args_sp MS_EXCEPTION_IF_NULL(args_spec_list[0]); AbstractFunctionPtr fn = dyn_cast(args_spec_list[0]); if (fn == nullptr) { - MS_LOG(EXCEPTION) << "'GradOperation' arg0 must be a 'Function' or 'Cell', but got " + MS_LOG(EXCEPTION) << "For 'GradOperation', the first argument must be a 'Function' or 'Cell', but got " << args_spec_list[0]->ToString(); } diff --git a/mindspore/ccsrc/frontend/operator/composite/do_signature.cc b/mindspore/ccsrc/frontend/operator/composite/do_signature.cc index f3046f6318e..133fb4732c2 100644 --- a/mindspore/ccsrc/frontend/operator/composite/do_signature.cc +++ b/mindspore/ccsrc/frontend/operator/composite/do_signature.cc @@ -55,7 +55,7 @@ void ProcessDefault(const std::string &func_name, size_t actual_param_number, co for (size_t i = actual_param_number; i < sig_size; ++i) { auto default_value = signature[i].default_value; if (default_value == nullptr) { - MS_LOG(EXCEPTION) << "The size of input in the operator should be " << 
sig_size << ", but got " + MS_LOG(EXCEPTION) << "For '" << func_name << "', the size of input should be " << sig_size << ", but got " << actual_param_number << ". Please check inputs of the operator."; } else { (*op_inputs).push_back(NewValueNode(default_value)); diff --git a/mindspore/ccsrc/frontend/operator/composite/map.cc b/mindspore/ccsrc/frontend/operator/composite/map.cc index 54745d9f467..55e3ad86b33 100644 --- a/mindspore/ccsrc/frontend/operator/composite/map.cc +++ b/mindspore/ccsrc/frontend/operator/composite/map.cc @@ -109,7 +109,7 @@ AnfNodePtr Map::FullMakeList(const std::shared_ptr &type, const FuncGraphP } } if (is_not_same) { - MS_LOG(EXCEPTION) << "The length of lists in Map must be the same. " << oss.str(); + MS_LOG(EXCEPTION) << "For 'Map', the length of lists must be the same. " << oss.str(); } constexpr size_t kPrimHoldLen = 1; @@ -170,7 +170,7 @@ AnfNodePtr Map::FullMakeTuple(const std::shared_ptr &type, const FuncGrap } } if (is_not_same) { - MS_LOG(EXCEPTION) << "The length of tuples in Map must be the same. " << oss.str(); + MS_LOG(EXCEPTION) << "For 'Map', the length of tuples must be the same. 
" << oss.str(); } constexpr size_t kPrimHoldLen = 1; diff --git a/mindspore/ccsrc/frontend/operator/ops_front_infer_function.cc b/mindspore/ccsrc/frontend/operator/ops_front_infer_function.cc index 056c7813e3f..e11ab11d2fd 100644 --- a/mindspore/ccsrc/frontend/operator/ops_front_infer_function.cc +++ b/mindspore/ccsrc/frontend/operator/ops_front_infer_function.cc @@ -631,7 +631,8 @@ AbstractBasePtr InferImplMakeRange(const AnalysisEnginePtr &, const PrimitivePtr if (slide.step <= 0) { MS_LOG(EXCEPTION) << "For 'range', while the argument 'start' " << slide.start << " is less than or equal to the argument 'stop' " << slide.stop << ", " - << "the argument 'step' must be more than 0, but the argument 'step' is " << slide.step << "."; + << "the argument 'step' must be greater than 0, but the argument 'step' is " << slide.step + << "."; } for (int64_t i = slide.start; i < slide.stop; i += slide.step) { @@ -643,7 +644,7 @@ AbstractBasePtr InferImplMakeRange(const AnalysisEnginePtr &, const PrimitivePtr } } else { if (slide.step >= 0) { - MS_LOG(EXCEPTION) << "For 'range', while the argument 'start' " << slide.start << " is more than the argument " + MS_LOG(EXCEPTION) << "For 'range', while the argument 'start' " << slide.start << " is greater than the argument " << "'stop' " << slide.stop << ", the argument 'step' must be less than 0, " << "but the argument 'step' is " << slide.step << "."; } @@ -773,7 +774,7 @@ AbstractBasePtr InferImplMakeRecord(const AnalysisEnginePtr &, const PrimitivePt const AbstractBasePtrList &args_spec_list) { // Inputs: at lease two objects of a subclass of AbstractBase. 
if (args_spec_list.size() < 2) { - MS_LOG(EXCEPTION) << "The size of arguments of MakeRecord operator must more than 1, but the input size is " + MS_LOG(EXCEPTION) << "The size of arguments of MakeRecord operator must be greater than 1, but the input size is " << args_spec_list.size() << "."; } diff --git a/tests/syntax/simple_expression/test_map.py b/tests/syntax/simple_expression/test_map.py index 7fcdffcf9cf..c3eeaf67d8d 100644 --- a/tests/syntax/simple_expression/test_map.py +++ b/tests/syntax/simple_expression/test_map.py @@ -118,7 +118,7 @@ def test_map_args_full_make_list_same_length(): input_me_y = Tensor(np.random.randn(2, 3, 4, 5).astype(np.float32)) net = MapNet() - with pytest.raises(Exception, match="The length of lists in Map must be the same"): + with pytest.raises(Exception, match="For 'Map', the length of lists must be the same."): ret = net([input_me_x], [input_me_y, input_me_y]) print("ret:", ret) @@ -142,7 +142,7 @@ def test_map_args_full_make_tuple_same_length(): input_me_y = Tensor(np.random.randn(2, 3, 4, 5).astype(np.float32)) net = MapNet() - with pytest.raises(Exception, match="The length of tuples in Map must be the same."): + with pytest.raises(Exception, match="For 'Map', the length of tuples must be the same."): ret = net((input_me_x, input_me_x), (input_me_y, input_me_y, input_me_y)) print("ret:", ret) diff --git a/tests/syntax/simple_expression/test_operator.py b/tests/syntax/simple_expression/test_operator.py index f3a51f3a51a..8695483b81c 100644 --- a/tests/syntax/simple_expression/test_operator.py +++ b/tests/syntax/simple_expression/test_operator.py @@ -84,7 +84,7 @@ def test_inner_scalar_mod_args_length(): x = Tensor(2, dtype=ms.int32) net = Net() - with pytest.raises(Exception, match="The size of input in the operator should be 2"): + with pytest.raises(Exception, match="For 'S-Prim-Mod', the size of input should be 2"): ret = net(x) print("ret:", ret) diff --git a/tests/ut/python/optimizer/test_auto_grad.py 
b/tests/ut/python/optimizer/test_auto_grad.py index 72eaa357e22..8643fbd3d81 100644 --- a/tests/ut/python/optimizer/test_auto_grad.py +++ b/tests/ut/python/optimizer/test_auto_grad.py @@ -154,9 +154,6 @@ def test_second_grad_with_j_primitive(): def test_ad_fv_cnode_order(): context.set_context(mode=context.GRAPH_MODE) class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - # cnode xay is not being MapMorphism when cnode second_level() is being MapMorphism and # BackPropagateFv as MapMorphism is started from output node and from left to right order. def construct(self, x, y): @@ -256,9 +253,6 @@ def test_limit_lift_fv_scope(): def test_same_primal_used_by_multi_j(): class Net(nn.Cell): - def __init__(self): - super(Net, self).__init__() - def construct(self, x): return x @@ -466,7 +460,7 @@ def test_grad_net_is_none(): try: GradNetWrtX(Net())(x, y) except Exception as e: - assert "'GradOperation' arg0 must be a 'Function' or 'Cell', but got" in str(e) + assert "For 'GradOperation', the first argument must be a 'Function' or 'Cell', but got" in str(e) def test_grad_missing_net():