diff --git a/mindspore/core/ops/abs.cc b/mindspore/core/ops/abs.cc
index 1f57ecbbb0f..31ac579b100 100644
--- a/mindspore/core/ops/abs.cc
+++ b/mindspore/core/ops/abs.cc
@@ -135,8 +135,8 @@ ValuePtr AbsInferValue(const PrimitivePtr &prim, const std::vectorname() << "', the supported data type is ['int8', 'int16', 'int32', 'int64', 'uint8', " - "'uint16','uint32', 'uint64','float16', 'float32', 'float64'], but got " - << x_tensor->ToString(); + "'uint16','uint32', 'uint64','float16', 'float32', 'float64'], but got: " + << x_tensor->ToString() << "."; } } return result_tensor;
diff --git a/mindspore/core/ops/accumulate_n_v2.cc b/mindspore/core/ops/accumulate_n_v2.cc
index 0754e42cdd4..6427c7d379a 100644
--- a/mindspore/core/ops/accumulate_n_v2.cc
+++ b/mindspore/core/ops/accumulate_n_v2.cc
@@ -52,9 +52,9 @@ abstract::ShapePtr AccumulateNV2InferShape(const PrimitivePtr &primitive, } } if (*shape != *shape_0) { - MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', Shape of input[" << i - << "] should be not consistent with the shape of input[0], but got shape of input[" << i - << "]: " << shape->ToString() << ", shape of input[0]: " << shape_0->ToString(); + MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', shape of input[" << i + << "] must be consistent with the shape of input[0], but got shape of input[" << i + << "]: " << shape->ToString() << ", shape of input[0]: " << shape_0->ToString() << "."; } } auto in_shape = element0_shape_map[kShape];
diff --git a/mindspore/core/ops/addn.cc b/mindspore/core/ops/addn.cc
index b86d53440b2..9a3683cc753 100644
--- a/mindspore/core/ops/addn.cc
+++ b/mindspore/core/ops/addn.cc
@@ -44,16 +44,17 @@ abstract::ShapePtr AddNInferShape(const PrimitivePtr &primitive, const std::vect const auto &shape_0_vec = shape_0->cast()->shape(); if ((shape_vec == ShapeVector({1}) && shape_0_vec == ShapeVector()) || (shape_vec == ShapeVector() && shape_0_vec == ShapeVector({1}))) { - MS_LOG(DEBUG) << "The primitive[" << primitive->name() << "]'s input[" << i << "] shape: " << shape->ToString() - << " are consistent with the shape of input[0]" << shape_0->ToString(); + MS_LOG(DEBUG) << "For '" << primitive->name() << "', shape of input[" << i + << "] is consistent with that of input[0].
Got shape of input[" << i + << "]: " << shape->ToString() << ", shape of input[0]: " << shape_0->ToString() << "."; continue; } } if (!shape->IsDynamic() && !shape_0->IsDynamic()) { if (*shape != *shape_0) { - MS_EXCEPTION(ValueError) << "The primitive[" << primitive->name() << "]'s input shape must be same, " - << "but got the shape of input[" << i << "]: " << shape->ToString() - << ", shape of input[0]:" << shape_0->ToString(); + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', input shapes must be the same, but got shape of input[" << i + << "]: " << shape->ToString() << ", shape of input[0]: " << shape_0->ToString() << "."; } } }
diff --git a/mindspore/core/ops/apply_ada_max.cc b/mindspore/core/ops/apply_ada_max.cc
index 8640197c423..770d9404637 100644
--- a/mindspore/core/ops/apply_ada_max.cc
+++ b/mindspore/core/ops/apply_ada_max.cc
@@ -83,20 +83,23 @@ abstract::TupleShapePtr ApplyAdaMaxInferShape(const PrimitivePtr &primitive, same_shape_args_map.insert({"grad", grad_shape}); if (!var_shape_ptr->IsDynamic() && !m_shape_ptr->IsDynamic()) { if (*m_shape != *var_shape) { - MS_EXCEPTION(ValueError) << primitive->name() << " evaluator arg m shape " << m_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', evaluator arg 'm' and 'var' must have the same shape. But got 'm' shape: " + << m_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } } if (!v_shape_ptr->IsDynamic() && !var_shape_ptr->IsDynamic()) { if (*v_shape != *var_shape) { - MS_EXCEPTION(ValueError) << primitive->name() << " evaluator arg v shape " << v_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', evaluator arg 'v' and 'var' must have the same shape. But got 'v' shape: " + << v_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } } if (!grad_shape_ptr->IsDynamic() && !var_shape_ptr->IsDynamic()) { if (*grad_shape != *var_shape) { - MS_EXCEPTION(ValueError) << primitive->name() << " evaluator arg grad shape " << grad_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', evaluator arg 'grad' and 'var' must have the same shape. But got 'grad' shape: " + << grad_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } }
diff --git a/mindspore/core/ops/apply_adadelta.cc b/mindspore/core/ops/apply_adadelta.cc
index 95029d79463..f9e92eb84de 100644
--- a/mindspore/core/ops/apply_adadelta.cc
+++ b/mindspore/core/ops/apply_adadelta.cc
@@ -46,22 +46,28 @@ abstract::TupleShapePtr ApplyAdadeltaInferShape(const PrimitivePtr &primitive, // var and accum must have the same shape when is not dynamic if (!var_shape_ptr->IsDynamic() && !accum_shape_ptr->IsDynamic()) { if (*var_shape != *accum_shape) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg accum shape " << accum_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) + << "For '" << prim_name + << "', 'var' and 'accum' must have the same shape when they are not dynamic.
But got 'var' shape: " + << var_shape->ToString() << ", 'accum' shape: " << accum_shape->ToString() << "."; } } // var and accum update must have the same shape when is not dynamic if (!var_shape_ptr->IsDynamic() && !accum_update_shape_ptr->IsDynamic()) { if (*var_shape != *accum_update_shape) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg accum update shape " << accum_update_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) + << "For '" << prim_name + << "', 'var' and 'accum_update' must have the same shape when they are not dynamic. But got 'var' shape: " + << var_shape->ToString() << ", 'accum_update' shape: " << accum_update_shape->ToString() << "."; } } // var and grad must have the same shape when is not dynamic if (!var_shape_ptr->IsDynamic() && !grad_shape_ptr->IsDynamic()) { if (*var_shape != *grad_shape) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg grad shape " << grad_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) + << "For '" << prim_name + << "', 'var' and 'grad' must have the same shape when they are not dynamic. But got 'var' shape: " + << var_shape->ToString() << ", 'grad' shape: " << grad_shape->ToString() << "."; } } const int64_t kShapeSize = 1;
diff --git a/mindspore/core/ops/apply_adagrad.cc b/mindspore/core/ops/apply_adagrad.cc
index b720b0f1a68..287a7541cf1 100644
--- a/mindspore/core/ops/apply_adagrad.cc
+++ b/mindspore/core/ops/apply_adagrad.cc
@@ -52,8 +52,10 @@ abstract::TupleShapePtr ApplyAdagradInferShape(const PrimitivePtr &primitive, same_shape_args_map.insert({"grad", grad_shape_ptr}); for (auto &elem : same_shape_args_map) { if (*elem.second != *var_shape_ptr) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg " << elem.first << " shape " << elem.second->ToString() - << " are not consistent with var shape " << var_shape_ptr->ToString(); + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', evaluator arg '" << elem.first + << "' and 'var' must have the same shape. But got '" << elem.first + << "' shape: " << elem.second->ToString() + << ", 'var' shape: " << var_shape_ptr->ToString() << "."; } } return std::make_shared(std::vector{var_shape_ptr, accum_shape_ptr});
diff --git a/mindspore/core/ops/apply_adagrad_v2.cc b/mindspore/core/ops/apply_adagrad_v2.cc
index f4711e4a50a..44e363a8c80 100644
--- a/mindspore/core/ops/apply_adagrad_v2.cc
+++ b/mindspore/core/ops/apply_adagrad_v2.cc
@@ -51,9 +51,10 @@ abstract::TupleShapePtr ApplyAdagradV2InferShape(const PrimitivePtr &primitive, same_shape_args_map.insert({"grad", grad_shape}); for (auto &elem : same_shape_args_map) { if (*elem.second != *var_shape) { - MS_EXCEPTION(ValueError) << primitive->name() << " evaluator arg " << elem.first << " shape " - << elem.second->ToString() << " are not consistent with var shape " - << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', evaluator arg '" << elem.first + << "' and 'var' must have the same shape.
But got '" << elem.first + << "' shape: " << elem.second->ToString() << ", 'var' shape: " << var_shape->ToString() + << "."; } } return std::make_shared(std::vector{var_shape, accum_shape}); diff --git a/mindspore/core/ops/apply_adam_with_amsgrad.cc b/mindspore/core/ops/apply_adam_with_amsgrad.cc index 0b31c609f3c..37604515d8f 100644 --- a/mindspore/core/ops/apply_adam_with_amsgrad.cc +++ b/mindspore/core/ops/apply_adam_with_amsgrad.cc @@ -56,8 +56,10 @@ abstract::TupleShapePtr ApplyAdamWithAmsgradInferShape(const PrimitivePtr &primi same_shape_args_map.insert({"grad", grad_shape}); for (auto &elem : same_shape_args_map) { if (*elem.second != *var_shape) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg " << elem.first << " shape " << elem.second->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', evaluator arg '" << elem.first + << "' and 'var' must have the same shape. But got '" << elem.first + << "' shape: " << elem.second->ToString() << ", 'var' shape: " << var_shape->ToString() + << "."; } } return std::make_shared( diff --git a/mindspore/core/ops/apply_add_sign.cc b/mindspore/core/ops/apply_add_sign.cc index 78382cd00e1..be58cf99b42 100644 --- a/mindspore/core/ops/apply_add_sign.cc +++ b/mindspore/core/ops/apply_add_sign.cc @@ -48,14 +48,16 @@ abstract::TupleShapePtr ApplyAddSignInferShape(const PrimitivePtr &primitive, auto grad_shape_ptr = grad_shape->cast(); if (!m_shape_ptr->IsDynamic() && !var_shape_ptr->IsDynamic()) { if (*m_shape != *var_shape) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg m shape " << m_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "', evaluator arg 'm' and 'var' must have the same shape. But got 'm' shape: " + << m_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } } if (!grad_shape_ptr->IsDynamic() && !var_shape_ptr->IsDynamic()) { if (*grad_shape != *var_shape) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg grad shape " << grad_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "', evaluator arg 'grad' and 'var' must have the same shape. But got 'grad' shape: " + << grad_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } } const int64_t kShapeSize = 1; diff --git a/mindspore/core/ops/apply_centered_rms_prop.cc b/mindspore/core/ops/apply_centered_rms_prop.cc index e4f171c140e..c7e7af383d1 100644 --- a/mindspore/core/ops/apply_centered_rms_prop.cc +++ b/mindspore/core/ops/apply_centered_rms_prop.cc @@ -42,29 +42,33 @@ abstract::ShapePtr ApplyCenteredRMSPropInferShape(const PrimitivePtr &primitive, // var and mg must have the same shape when is not dynamic if (!var_shape_ptr->IsDynamic() && !mg_shape_ptr->IsDynamic()) { if (*var_shape != *mg_shape) { - MS_EXCEPTION(ValueError) << op_name << " mean gradient shape " << mg_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << op_name + << "', 'mean_gradient'must have the same shape as 'var'. 
But got 'mean_gradient' shape: " + << mg_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } } // var and ms must have the same shape when is not dynamic if (!var_shape_ptr->IsDynamic() && !ms_shape_ptr->IsDynamic()) { if (*var_shape != *ms_shape) { - MS_EXCEPTION(ValueError) << op_name << " mean square shape " << ms_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << op_name + << "', 'mean_square' must have the same shape as 'var'. But got 'mean_square' shape: " + << ms_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } } // var and mom must have the same shape when is not dynamic if (!var_shape_ptr->IsDynamic() && !mom_shape_ptr->IsDynamic()) { if (*var_shape != *mom_shape) { - MS_EXCEPTION(ValueError) << op_name << " moment shape " << mom_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << op_name + << "', 'moment' must have the same shape as 'var'. But got 'moment' shape: " + << mom_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } } // var and grad must have the same shape when is not dynamic if (!var_shape_ptr->IsDynamic() && !grad_shape_ptr->IsDynamic()) { if (*var_shape != *grad_shape) { - MS_EXCEPTION(ValueError) << op_name << " grad shape " << grad_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << op_name + << "', 'grad' must have the same shape as 'var'. But got 'grad' shape: " + << grad_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } } auto shape_element = var_shape->cast();
diff --git a/mindspore/core/ops/apply_ftrl.cc b/mindspore/core/ops/apply_ftrl.cc
index 5f3f20d745a..ee1617d4ae9 100644
--- a/mindspore/core/ops/apply_ftrl.cc
+++ b/mindspore/core/ops/apply_ftrl.cc
@@ -44,8 +44,10 @@ abstract::ShapePtr ApplyFtrlInferShape(const PrimitivePtr &primitive, const std: same_shape_args_map.insert({"linear", linear_shape}); for (auto &elem : same_shape_args_map) { if (*elem.second != *var_shape) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg " << elem.first << " shape " << elem.second->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', evaluator arg '" << elem.first + << "' must have the same shape as 'var'. But got '" << elem.first + << "' shape: " << elem.second->ToString() << ", 'var' shape: " << var_shape->ToString() + << "."; } } auto shape_ptr = var_shape->cast();
diff --git a/mindspore/core/ops/apply_gradient_descent.cc b/mindspore/core/ops/apply_gradient_descent.cc
index f145d1c34a4..c902d94d7b4 100644
--- a/mindspore/core/ops/apply_gradient_descent.cc
+++ b/mindspore/core/ops/apply_gradient_descent.cc
@@ -39,8 +39,9 @@ abstract::ShapePtr ApplyGradientDescentInferShape(const PrimitivePtr &primitive, auto delta_shape_ptr = delta_shape->cast(); if (!var_shape_ptr->IsDynamic() && !delta_shape_ptr->IsDynamic()) { if (*var_shape != *delta_shape) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg delta shape " << delta_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "', evaluator arg 'delta' must have the same shape as 'var'.
But got 'delta' shape: " + << delta_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } } // alpha must be a scalar [Number, Tensor]
diff --git a/mindspore/core/ops/apply_keras_momentum.cc b/mindspore/core/ops/apply_keras_momentum.cc
index e0a95c4132c..a84ec174a97 100644
--- a/mindspore/core/ops/apply_keras_momentum.cc
+++ b/mindspore/core/ops/apply_keras_momentum.cc
@@ -50,8 +50,10 @@ abstract::TupleShapePtr ApplyKerasMomentumInferShape(const PrimitivePtr &primiti same_shape_args_map.insert({"grad", grad_shape}); for (auto &elem : same_shape_args_map) { if (*elem.second != *var_shape) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg " << elem.first << " shape " << elem.second->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', evaluator arg '" << elem.first + << "' must have the same shape as 'var'. But got '" << elem.first + << "' shape: " << elem.second->ToString() << ", 'var' shape: " << var_shape->ToString() + << "."; } } return std::make_shared(std::vector{var_shape, accum_shape});
diff --git a/mindspore/core/ops/apply_power_sign_d.cc b/mindspore/core/ops/apply_power_sign_d.cc
index 8ae3d9e2d57..a6407427f6d 100644
--- a/mindspore/core/ops/apply_power_sign_d.cc
+++ b/mindspore/core/ops/apply_power_sign_d.cc
@@ -73,8 +73,10 @@ abstract::TupleShapePtr ApplyPowerSignDInferShape(const PrimitivePtr &primitive, same_shape_args_map.insert({"grad", grad_shape}); for (auto &elem : same_shape_args_map) { if (*elem.second != *var_shape) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg " << elem.first << " shape " << elem.second->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', evaluator arg '" << elem.first + << "' must have the same shape as 'var'. But got '" << elem.first + << "' shape: " << elem.second->ToString() << ", 'var' shape: " << var_shape->ToString() + << "."; } } return std::make_shared(std::vector{var_shape, m_shape});
diff --git a/mindspore/core/ops/apply_proximal_adagrad.cc b/mindspore/core/ops/apply_proximal_adagrad.cc
index de57034d641..82534d48e81 100644
--- a/mindspore/core/ops/apply_proximal_adagrad.cc
+++ b/mindspore/core/ops/apply_proximal_adagrad.cc
@@ -62,8 +62,10 @@ abstract::TupleShapePtr ApplyProximalAdagradInferShape(const PrimitivePtr &primi same_shape_args_map.insert({"grad", grad_shape_ptr}); for (auto &elem : same_shape_args_map) { if (*elem.second != *var_shape_ptr) { - MS_EXCEPTION(ValueError) << prim_name << " evaluator arg " << elem.first << " shape " << elem.second->ToString() - << " are not consistent with var shape " << var_shape_ptr->ToString(); + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', evaluator arg '" << elem.first + << "' must have the same shape as 'var'.
But got '" << elem.first + << "' shape: " << elem.second->ToString() + << ", 'var' shape: " << var_shape_ptr->ToString() << "."; } } return std::make_shared(std::vector{var_shape_ptr, accum_shape_ptr}); diff --git a/mindspore/core/ops/apply_proximal_gradient_descent.cc b/mindspore/core/ops/apply_proximal_gradient_descent.cc index 139b025633d..6e264229805 100644 --- a/mindspore/core/ops/apply_proximal_gradient_descent.cc +++ b/mindspore/core/ops/apply_proximal_gradient_descent.cc @@ -61,8 +61,9 @@ abstract::ShapePtr ApplyProximalGradientDescentInferShape(const PrimitivePtr &pr auto delta_shape_ptr = delta_shape->cast(); if (!var_shape_ptr->IsDynamic() && !delta_shape_ptr->IsDynamic()) { if (*var_shape != *delta_shape) { - MS_EXCEPTION(ValueError) << primitive->name() << " evaluator arg delta shape " << delta_shape->ToString() - << " are not consistent with var shape " << var_shape->ToString(); + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', evaluator arg 'delta' must have the same shape as 'var'. But got 'delta' shape: " + << delta_shape->ToString() << ", 'var' shape: " << var_shape->ToString() << "."; } } auto shape_element = var_shape->cast(); diff --git a/mindspore/core/ops/approximate_equal.cc b/mindspore/core/ops/approximate_equal.cc index 3cd47370abb..dc78748590d 100644 --- a/mindspore/core/ops/approximate_equal.cc +++ b/mindspore/core/ops/approximate_equal.cc @@ -41,8 +41,9 @@ abstract::ShapePtr ApproximateEqualInferShape(const PrimitivePtr &primitive, MS_EXCEPTION_IF_NULL(shape_ptr_x2); if (!shape_ptr_x1->IsDynamic() && !shape_ptr_x2->IsDynamic()) { if (shape_ptr_x1->shape() != shape_ptr_x2->shape()) { - MS_EXCEPTION(ArgumentError) << "x1 shape and x2 shape should be same, but x1 shape: " << shape_ptr_x1->ToString() - << ", x2 shape:" << shape_ptr_x2->ToString(); + MS_EXCEPTION(ArgumentError) << "For '" << prim_name + << "', arg 'x1' must have the same shape as 'x2'. But got 'x1' shape: " + << shape_ptr_x1->ToString() << ", 'x2' shape: " << shape_ptr_x2->ToString() << "."; } } return shape_ptr_x1; diff --git a/mindspore/core/ops/assign.cc b/mindspore/core/ops/assign.cc index 08ba36fa45c..28c8e6d616c 100644 --- a/mindspore/core/ops/assign.cc +++ b/mindspore/core/ops/assign.cc @@ -43,14 +43,17 @@ abstract::ShapePtr AssignInferShape(const PrimitivePtr &prim, const std::vector< } else if (value_shape.size() == 1 && value_shape[0] == 1 && variable_shape.empty()) { return shape_element; } else { - MS_EXCEPTION(ValueError) << "For " << prim_name << ", the rank of value is " << value_shape.size() - << ". It should be same with variable's rank " << variable_shape.size() << "."; + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "','value' must have the same rank as 'variable'. But got 'value' rank: " + << value_shape.size() << ", 'variable' rank: " << variable_shape.size() << "."; } } for (uint64_t i = 0; i < variable_shape.size(); i++) { if (variable_shape[i] != value_shape[i]) { - MS_EXCEPTION(ValueError) << "For " << prim_name << ", the shape of value is " << value_shape_ptr->ToString() - << ". It should be same with variable's shape " << variable_shape_ptr->ToString() << "."; + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "','value' must have the same shape as 'variable'. 
But got 'value' shape: " + << value_shape_ptr->ToString() + << ", 'variable' shape: " << variable_shape_ptr->ToString() << "."; } } return shape_element;
diff --git a/mindspore/core/ops/avg_pool_3d.cc b/mindspore/core/ops/avg_pool_3d.cc
index a8ea703ed71..6db412a8c90 100644
--- a/mindspore/core/ops/avg_pool_3d.cc
+++ b/mindspore/core/ops/avg_pool_3d.cc
@@ -37,12 +37,13 @@ void GetAttrs(const PrimitivePtr &primitive, std::vector *kernel_size, // attr kernel size *kernel_size = GetValue>(primitive->GetAttr(kKernelSize)); if (kernel_size->size() != kKernelDims) { - MS_LOG(EXCEPTION) << "For '" << primitive->name() << "', kernel_size must be 5, but got " << kernel_size->size(); + MS_LOG(EXCEPTION) << "For '" << primitive->name() << "', 'kernel_size' must be 5, but got " << kernel_size->size() + << "."; } // attr strides *strides = GetValue>(primitive->GetAttr(kStrides)); if (strides->size() != kStridesDims) { - MS_LOG(EXCEPTION) << "For '" << primitive->name() << "',strides must be 5, but got " << strides->size(); + MS_LOG(EXCEPTION) << "For '" << primitive->name() << "', 'strides' must be 5, but got " << strides->size() << "."; } if (std::any_of(strides->begin(), strides->end(), [](int64_t stride) { return stride <= 0; })) { MS_EXCEPTION(ValueError) << "For '" << primitive->name()
diff --git a/mindspore/core/ops/batch_matmul.cc b/mindspore/core/ops/batch_matmul.cc
index b0b72ac6ee3..199329a09ff 100644
--- a/mindspore/core/ops/batch_matmul.cc
+++ b/mindspore/core/ops/batch_matmul.cc
@@ -68,16 +68,17 @@ abstract::ShapePtr BatchMatmulInferShape(const PrimitivePtr &primitive, auto context = MsContext::GetInstance(); bool is_ascend = (context->get_param(MS_CTX_DEVICE_TARGET) == kAscendDevice); if (!is_ascend && x_shp.size() != y_shp.size()) { - MS_EXCEPTION(ValueError) << "For BatchMatMul on cpu/gpu, input x's size should be equal to input y's size, " "while x size = " - << x_shp.size() << ", y size = " << y_shp.size(); + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "' on cpu/gpu, input 'x' should have the same size as input 'y'. But got 'x' size: " + << x_shp.size() << ", 'y' size: " << y_shp.size() << "."; } constexpr size_t x_dim_limit = 3; constexpr size_t y_dim_limit = 2; if (x_shp.size() < x_dim_limit || y_shp.size() < y_dim_limit) { - MS_EXCEPTION(ValueError) << "For BatchMatMul, input x should be greater or equal to 3, input y should be greater " "or equal to 2 while x size = " - << x_shp.size() << ", y size = " << y_shp.size(); + MS_EXCEPTION(ValueError) + << "For '" << prim_name + << "', the rank of input 'x' must be greater than or equal to 3, and the rank of input 'y' must be greater than or equal to 2.
But got 'x': " + << x_shp.size() << ", 'y': " << y_shp.size() << "."; } constexpr size_t offset = 2; std::vector x_last(x_shp.end() - offset, x_shp.end()); @@ -146,8 +147,8 @@ TypePtr BatchMatmulInferType(const PrimitivePtr &prim, const std::vectorGetAttr("cast_type"); MS_EXCEPTION_IF_NULL(out_type); if (!out_type->isa()) { - MS_EXCEPTION(ValueError) << "For '" << prim->name() << "', MatMul cast_type must be a 'Type', but got " - << out_type; + MS_EXCEPTION(ValueError) << "For '" << prim->name() << "', MatMul cast_type must be a 'Type', but got: '" + << out_type << "'."; } x_type = out_type->cast(); } diff --git a/mindspore/core/ops/bounding_box_decode.cc b/mindspore/core/ops/bounding_box_decode.cc index 9c7f9480bed..f447d09ac6a 100644 --- a/mindspore/core/ops/bounding_box_decode.cc +++ b/mindspore/core/ops/bounding_box_decode.cc @@ -51,18 +51,21 @@ abstract::ShapePtr BoundingBoxDecodeInferShape(const PrimitivePtr &primitive, prim_name); if (anchor_box_shape[0] != deltas_shape[0]) { - MS_EXCEPTION(ValueError) << "anchor_box first dimension must be the same as deltas first dimension, but got" - "anchor_box_shape: " - << anchor_box_shape[0] << ", deltas_shape: " << deltas_shape[0]; + MS_EXCEPTION(ValueError) + << "For '" << prim_name + << "', 'anchor_box' and 'deltas' must have the same first dimension. But got anchor_box_shape[0]: " + << anchor_box_shape[0] << ", deltas_shape[0]: " << deltas_shape[0] << "."; } const int64_t last_dimension = 4; if (anchor_box_shape[1] != last_dimension) { - MS_EXCEPTION(ValueError) << "anchor_box last dimension must be 4, but got anchor_box_shape: " - << anchor_box_shape[1]; + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "', 'anchor_box' must have a last dimension of 4, but got: " << anchor_box_shape[1] + << "."; } if (deltas_shape[1] != last_dimension) { - MS_EXCEPTION(ValueError) << "deltas last dimension must be 4, but got deltas_shape: " << deltas_shape[1]; + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "', 'deltas' must have a last dimension of 4, but got: " << deltas_shape[1] << "."; } auto x_shape = anchor_box->cast(); diff --git a/mindspore/core/ops/coalesce.cc b/mindspore/core/ops/coalesce.cc index 490faef706a..a79465fd96b 100644 --- a/mindspore/core/ops/coalesce.cc +++ b/mindspore/core/ops/coalesce.cc @@ -48,24 +48,23 @@ abstract::TupleShapePtr CoalesceInferShape(const PrimitivePtr &primitive, auto x_values_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[kInputIndex1]->BuildShape())[kShape]; auto x_shape_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[kInputIndex2]->BuildShape())[kShape]; if (x_indices_shape.size() != x_indices_shape_size || x_values_shape.size() != 1 || x_shape_shape.size() != 1) { - MS_EXCEPTION(ValueError) << "For Coalesce, x_indices should be a 2-D tensor" - << ", x_values should be a 1-D tensor" - << ", x_shape should be a 1-D tensor" - << ", but got x_indices is a " << x_indices_shape.size() << "-D tensor" - << ", got x_values is a " << x_values_shape.size() << "-D tensor" - << ", got x_shape is a " << x_shape_shape.size() << "-D tensor"; + MS_EXCEPTION(ValueError) << "For '" << prim_name << "' x_indices must be a 2-D tensor" + << ", x_values and x_shape must be a 1-D tensor, but got x_indices is a " + << x_indices_shape.size() << "-D tensor, got x_values is a " << x_values_shape.size() + << "-D tensor, got x_shape is a " << x_shape_shape.size() << "-D tensor" + << "."; } if (x_indices_shape[0] != x_shape_shape[0]) { MS_EXCEPTION(ValueError) << "For " << prim_name - << 
", sizes of dim0 of x_indices and dim0 of x_shape should be the same" - << ", but size of dim0 of got x_indices is " << x_indices_shape[0] - << ", size of dim0 of got x_shape is " << x_shape_shape[0]; + << ", size of dim0 of x_indices and dim0 of x_shape must be the same" + << ", but got x_indices dim0 size: " << x_indices_shape[0] + << ", x_shape dim0 size: " << x_shape_shape[0] << "."; } if (x_indices_shape[1] != x_values_shape[0]) { MS_EXCEPTION(ValueError) << "For " << prim_name - << ", sizes of dim1 of x_indices and dim0 of x_values should be the same" - << ", but size of dim1 of got x_indices is " << x_indices_shape[1] - << ", size of dim0 of got x_values is " << x_values_shape[0]; + << ", size of dim1 of x_indices and dim0 of x_values must be the same" + << ", but got x_indices dim1 size: " << x_indices_shape[1] + << ", x_shape dim0 size: " << x_values_shape[0] << "."; } ShapeVector y_indices_shape = {x_indices_shape[0], -1}; ShapeVector y_indices_min_shape = {x_indices_shape[0], 1}; diff --git a/mindspore/core/ops/complex.cc b/mindspore/core/ops/complex.cc index 49425300972..686cf31ebb4 100644 --- a/mindspore/core/ops/complex.cc +++ b/mindspore/core/ops/complex.cc @@ -96,8 +96,9 @@ ValuePtr ComplexInferValue(const PrimitivePtr &prim, const std::vectordata_type() != imag_tensor->data_type()) { - MS_EXCEPTION(TypeError) << "Inputs of Complex should be same, but got " << real_tensor->data_type() << "and " - << imag_tensor->data_type(); + MS_EXCEPTION(TypeError) << "For 'ComplexInfer', the real part and imaginary part of input should have the same " + "data type. But got real type: " + << real_tensor->data_type() << ", imaginary type: " << imag_tensor->data_type() << "."; } auto data_size = real_tensor->DataSize(); diff --git a/mindspore/core/ops/concat.cc b/mindspore/core/ops/concat.cc index 54b3cb2a1f9..aeb36806f9a 100644 --- a/mindspore/core/ops/concat.cc +++ b/mindspore/core/ops/concat.cc @@ -85,7 +85,8 @@ TypePtr ConcatInferType(const PrimitivePtr &primitive, const std::vectorname(); if (!input_args[0]->isa() && !input_args[0]->isa()) { - MS_EXCEPTION(TypeError) << "The input of Concat must be list or tuple of tensors."; + MS_EXCEPTION(TypeError) << "For '" << prim_name << "', the input should be a list or tuple of tensors. But got:" + << input_args[0]->ToString() << "."; } auto elements = input_args[0]->isa() ? 
input_args[0]->cast()->elements()
diff --git a/mindspore/core/ops/conv2d.cc b/mindspore/core/ops/conv2d.cc
index 13cb143c5f9..0b927e3e40d 100644
--- a/mindspore/core/ops/conv2d.cc
+++ b/mindspore/core/ops/conv2d.cc
@@ -44,8 +44,8 @@ constexpr size_t right_padding = 3; void CheckShapeAnyAndPositive(const std::string &op, const ShapeVector &shape) { for (size_t i = 0; i < shape.size(); ++i) { if ((shape[i] < 0) && (shape[i] != Shape::SHP_ANY)) { - MS_EXCEPTION(ValueError) << op << " shape element [" << i << "] must be positive integer or SHP_ANY, but got " - << shape[i]; + MS_EXCEPTION(ValueError) << "For '" << op << "', shape element [" << i + << "] must be positive integer or SHP_ANY, but got: " << shape[i] << "."; } } }
@@ -53,7 +53,8 @@ void CheckShapeAllPositive(const std::string &op, const ShapeVector &shape) { for (size_t i = 0; i < shape.size(); ++i) { if (shape[i] < 0) { - MS_LOG(EXCEPTION) << op << " shape element [" << i << "] must be positive integer, but got " << shape[i]; + MS_LOG(EXCEPTION) << "For '" << op << "', shape element [" << i + << "] must be positive integer, but got: " << shape[i] << "."; } } }
@@ -62,7 +63,8 @@ int64_t CheckAttrPositiveInt64(const std::string &op, const ValuePtr &attr, cons MS_EXCEPTION_IF_NULL(attr); int64_t attr_val = attr->cast()->value(); if (attr_val <= 0) { - MS_LOG(EXCEPTION) << op << " invalid " << attr_name << " value: " << attr_val << ", should be greater then 0"; + MS_LOG(EXCEPTION) << "For '" << op << "', '" << attr_name << "' should be greater than 0, but got: " << attr_val + << "."; } return attr_val; }
diff --git a/mindspore/core/ops/crop_and_resize_grad_boxes.cc b/mindspore/core/ops/crop_and_resize_grad_boxes.cc
index e79b5e46824..9a0c6415697 100644
--- a/mindspore/core/ops/crop_and_resize_grad_boxes.cc
+++ b/mindspore/core/ops/crop_and_resize_grad_boxes.cc
@@ -72,16 +72,25 @@ abstract::ShapePtr CropAndResizeGradBoxesInferShape(const PrimitivePtr &primitiv (void)CheckAndConvertUtils::CheckInteger("box_index rank", SizeToLong(input_shape3.size()), kEqual, kBoxIndShapeLen, prim_name); if (!(input_shape1[kHeight] > 0 && input_shape1[kWidth] > 0)) { - MS_EXCEPTION(ValueError) << "the height and width of images must be over 0 "; + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "', the height and width of images must be greater than 0. But got height: " + << input_shape1[kHeight] << ", width: " << input_shape1[kWidth] << "."; } if (!(input_shape0[kHeight] > 0 && input_shape0[kWidth] > 0)) { - MS_EXCEPTION(ValueError) << "the height and width of grads must be over 0 "; + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "', the height and width of grads must be greater than 0. But got height: " + << input_shape0[kHeight] << ", width: " << input_shape0[kWidth] << "."; } if (!(input_shape0[0] == input_shape3[0] && input_shape2[0] == input_shape0[0])) { - MS_EXCEPTION(ValueError) << "the first dimension of the tensors in {grads, boxes, box_index} must be equal."; + MS_EXCEPTION(ValueError) + << "For '" << prim_name + << "', the first dimension of the tensors in {grads, boxes, box_index} must be equal.
But got grads[0]: " + << input_shape0[0] << ", boxes[0]: " << input_shape2[0] << ", box_index[0]: " << input_shape3[0] << "."; } if (input_shape0[kDepth] != input_shape1[kDepth]) { - MS_EXCEPTION(ValueError) << "the depth of grads and images must be equal."; + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "', the depth of grads and images must be equal. But got grads depth: " + << input_shape0[kDepth] << ", images depth: " << input_shape1[kDepth] << "."; } return std::make_shared(input_shape2); }
diff --git a/mindspore/core/ops/cross.cc b/mindspore/core/ops/cross.cc
index 88e2662078f..228ac686770 100644
--- a/mindspore/core/ops/cross.cc
+++ b/mindspore/core/ops/cross.cc
@@ -57,24 +57,24 @@ abstract::ShapePtr CrossInferShape(const PrimitivePtr &primitive, const std::vec break; } if (i == x1_shape.size() - 1 && x1_shape[i] != dim_size_value) { - MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', The size of inputs dim should be 3, but got " - << x1_shape[i]; + MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', the size of inputs dim should be 3, but got " + << x1_shape[i] << "."; } } } if ((dim < -static_cast(x1_shape.size()) || dim > static_cast(x1_shape.size()) - 1) && dim != default_dim) { - MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "',dim should be between " + MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', dim should be between " << -static_cast(x1_shape.size()) << " and " - << static_cast(x1_shape.size()) - 1 << " ,but got " << dim; + << static_cast(x1_shape.size()) - 1 << ", but got " << dim << "."; } if (dim < 0 && dim != default_dim) { dim = static_cast(x1_shape.size()) + dim; } int64_t dim_size = 3; if (x1_shape[dim] != dim_size && x2_shape[dim] != dim_size && dim != default_dim) { - MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', The size of inputs dim should be 3, but got " - << x1_shape[dim]; + MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', the size of inputs dim should be 3, but got " + << x1_shape[dim] << "."; } return std::make_shared(x1_shape); }
diff --git a/mindspore/core/ops/ctc_loss_v2.cc b/mindspore/core/ops/ctc_loss_v2.cc
index 86f79c6ba54..77e9bd8ee1b 100644
--- a/mindspore/core/ops/ctc_loss_v2.cc
+++ b/mindspore/core/ops/ctc_loss_v2.cc
@@ -48,10 +48,11 @@ abstract::TupleShapePtr CTCLossV2InferShape(const PrimitivePtr &primitive, auto targets_shape = targets_shape_map[kShape]; if (log_probs_shape.size() != kLenLogProbs) { MS_LOG(EXCEPTION) << "For '" << prim_name - << "', Input log_probs's dims must be 3, but got :" << log_probs_shape.size(); + << "', input log_probs's dims must be 3, but got: " << log_probs_shape.size() << "."; } if (targets_shape.size() != kLenTarget) { - MS_LOG(EXCEPTION) << "For '" << prim_name << "', Input targets's dims must be 2, but got :" << targets_shape.size(); + MS_LOG(EXCEPTION) << "For '" << prim_name << "', input targets's dims must be 2, but got: " << targets_shape.size() + << "."; } int64_t T = log_probs_shape[0]; int64_t N = log_probs_shape[1];
diff --git a/mindspore/core/ops/ctc_loss_v2_grad.cc b/mindspore/core/ops/ctc_loss_v2_grad.cc
index 0b56ddba4b3..9c842411f04 100644
--- a/mindspore/core/ops/ctc_loss_v2_grad.cc
+++ b/mindspore/core/ops/ctc_loss_v2_grad.cc
@@ -44,7 +44,7 @@ abstract::ShapePtr CTCLossV2GradInferShape(const PrimitivePtr &primitive, auto log_probs_shape = log_probs_shape_map[kShape]; if (log_probs_shape.size() != kLenLogProbs) { MS_LOG(EXCEPTION) << "For '" << prim_name - << "', Input log_probs's dims must be 3, but got
:" << log_probs_shape.size(); + << "', input log_probs's dims must be 3, but got: " << log_probs_shape.size() << "."; } int64_t T = log_probs_shape[0]; int64_t N = log_probs_shape[1]; diff --git a/mindspore/core/ops/ctcloss.cc b/mindspore/core/ops/ctcloss.cc index e50e942adb5..9c5f856d383 100644 --- a/mindspore/core/ops/ctcloss.cc +++ b/mindspore/core/ops/ctcloss.cc @@ -58,12 +58,14 @@ void CheckCTCLossInputs(const std::vector &input_args, const st op_name); if (labels_indices_shape[0] != labels_values_shape[0]) { - MS_EXCEPTION(ValueError) << "For CTCLoss first dim of label_indices and label_value must be same, but got " - << labels_indices_shape[0] << " and " << labels_values_shape[0]; + MS_EXCEPTION(ValueError) + << "For 'CTCLoss', the first dim of 'label_indices' and 'label_value' must be same, but got 'label_indices':" + << labels_indices_shape[0] << ", 'label_value': " << labels_values_shape[0] << "."; } if (inputs_shape[1] != sequence_length_shape[0]) { - MS_EXCEPTION(ValueError) << "For CTCLoss input batch_size must be same with sequence_length batch_size, but got " - << inputs_shape[1] << " and " << sequence_length_shape[0]; + MS_EXCEPTION(ValueError) + << "For 'CTCLoss', input batch_size must be same with 'sequence_length' batch_size, but got input batch_size:" + << inputs_shape[1] << ", 'sequence_length' batch_size: " << sequence_length_shape[0] << "."; } } diff --git a/mindspore/core/ops/cummax.cc b/mindspore/core/ops/cummax.cc index c35f84e4c7b..68da05e9b0b 100644 --- a/mindspore/core/ops/cummax.cc +++ b/mindspore/core/ops/cummax.cc @@ -33,13 +33,14 @@ abstract::TupleShapePtr CummaxInferShape(const PrimitivePtr &primitive, auto x_shape_value = CheckAndConvertUtils::ConvertShapePtrToShapeMap(x_shape)[kShape]; auto axis = GetValue(primitive->GetAttr(AXIS)); if (x_shape_value.size() <= 0) { - MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', inputs 'axis' should be greater than 0, but got " - << x_shape_value.size() << "."; + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', input tensor's shape size must be greater than 0, but got: " << x_shape_value.size() + << "."; } if (axis >= static_cast(x_shape_value.size()) || axis < -static_cast(x_shape_value.size())) { - MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "',The value of 'axis' should be in the range of [" + MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', the value of 'axis' must be in the range of [" << -static_cast(x_shape_value.size()) << "," - << static_cast(x_shape_value.size()) << "], but got axis:" << axis << "."; + << static_cast(x_shape_value.size()) << "], but got 'axis': " << axis << "."; } return std::make_shared(std::vector{x_shape, x_shape}); } diff --git a/mindspore/core/ops/diag_part.cc b/mindspore/core/ops/diag_part.cc index 5c6fbfda6f0..877b6a5419a 100644 --- a/mindspore/core/ops/diag_part.cc +++ b/mindspore/core/ops/diag_part.cc @@ -32,8 +32,8 @@ abstract::ShapePtr DiagPartInferShape(const PrimitivePtr &primitive, const std:: auto op_name = primitive->name(); auto input_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[0]->GetShapeTrack())[kShape]; if ((input_shape.size() % kScaleNum) != 0 || input_shape.size() == 0) { - MS_EXCEPTION(ValueError) << "For DiagPart, input rank must be non-zero and even, but got rank " - << input_shape.size(); + MS_EXCEPTION(ValueError) << "For 'DiagPart', input rank must be non-zero and even, but got rank: " + << input_shape.size() << "."; } auto length = input_shape.size() / kScaleNum; std::vector 
out_shape;
diff --git a/mindspore/core/ops/div_no_nan.cc b/mindspore/core/ops/div_no_nan.cc
index 0f3061e0dc1..540385045bb 100644
--- a/mindspore/core/ops/div_no_nan.cc
+++ b/mindspore/core/ops/div_no_nan.cc
@@ -155,8 +155,8 @@ ValuePtr DivNoNanInferValue(const PrimitivePtr &prim, const std::vectorname() << "', the supported type is in the list: ['bool', 'int8', 'int16', 'int32', 'int64', " - "'uint8', 'uint16', 'uint32', 'uint64', 'float16', 'float32', 'float64'], but got " - << result_type->ToString(); + "'uint8', 'uint16', 'uint32', 'uint64', 'float16', 'float32', 'float64'], but got: " + << result_type->ToString() << "."; } } return result_tensor;
diff --git a/mindspore/core/ops/dropout_do_mask.cc b/mindspore/core/ops/dropout_do_mask.cc
index a5cd29707ff..e092113fac0 100644
--- a/mindspore/core/ops/dropout_do_mask.cc
+++ b/mindspore/core/ops/dropout_do_mask.cc
@@ -36,8 +36,8 @@ T GetAndCheckKeepProp(const tensor::TensorPtr &keep_prop) { T min = (T)0.0; T max = (T)1.0; if (*value < min || *value > max) { - MS_EXCEPTION(ValueError) << "For 'DropoutDoMask', the 'keep_prop' input value must in the range [0, 1], but got " - << *value << "."; + MS_EXCEPTION(ValueError) + << "For 'DropoutDoMask', the 'keep_prop' input value must be in the range [0, 1], but got: " << *value << "."; } return *value; }
@@ -59,22 +59,22 @@ abstract::ShapePtr DropoutDoMaskInferShape(const PrimitivePtr &primitive, x_size *= x_shape_vector[i]; } if (mask_shape_vector.size() != 1) { - MS_EXCEPTION(ValueError) << "For 'DropoutDoMask', the input mask must be 1-dimension, but got " << mask_shape_vector.size() << "."; + MS_EXCEPTION(ValueError) << "For 'DropoutDoMask', the input 'mask' must be 1-dimension, but got: " << mask_shape_vector.size() << "."; } auto mask_size = mask_shape_vector[0] * 8; if (x_size > mask_size) { MS_EXCEPTION(ValueError) - << "For 'DropoutDoMask', the input mask should be less than or equal to match input, but got input_x shape: " - << x_shape->ToString() << ", mask shape: " << mask_shape->ToString(); + << "For 'DropoutDoMask', the number of bits in 'mask' must be greater than or equal to the number of elements of 'input_x', but got 'input_x' shape: " + << x_shape->ToString() << ", 'mask' shape: " << mask_shape->ToString() << "."; } } auto keep_prop = input_args[kInputIndex2]; if (keep_prop->isa()) { auto keep_prop_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(keep_prop->BuildShape())[kShape]; if (!keep_prop_shape.empty()) { - MS_EXCEPTION(ValueError) << "'For DropoutDoMask', the keep_prop's dim must be 0(scalar), but got " - << keep_prop_shape.size(); + MS_EXCEPTION(ValueError) << "For 'DropoutDoMask', dim of 'keep_prop' must be 0 (scalar), but got: " + << keep_prop_shape.size() << "."; } } return x_shape;
@@ -106,16 +106,16 @@ TypePtr DropoutDoMaskInferType(const PrimitivePtr &primitive, const std::vector< } else if (keep_prop->isa()) { if (keep_prop_value != nullptr) { if (!keep_prop_value->isa()) { - MS_EXCEPTION(TypeError) << "For 'DropoutDoMask', the keep_prop input type must be float."; + MS_EXCEPTION(TypeError) << "For 'DropoutDoMask', the 'keep_prop' input type must be float."; } auto value = GetValue(keep_prop_value); if (value < 0 || value > 1) { - MS_EXCEPTION(ValueError) << "For 'DropoutDoMask', the keep_prop input value must in the range [0, 1], but got " - << value << "."; + MS_EXCEPTION(ValueError) << "For 'DropoutDoMask', the 'keep_prop' must be in the range [0, 1], but got: " << value + << "."; } } } else { - MS_EXCEPTION(TypeError) << "For 'DropoutDoMask', the keep_prop input must be a float number or tensor."; + MS_EXCEPTION(TypeError) <<
"For 'DropoutDoMask', the 'keep_prop' input must be a float number or tensor."; } (void)CheckAndConvertUtils::CheckTensorTypeValid("inputs", input_args[1]->BuildType(), {kUInt8}, op_name); diff --git a/mindspore/core/ops/dropout_gen_mask.cc b/mindspore/core/ops/dropout_gen_mask.cc index 20213161340..8cf24adfc30 100644 --- a/mindspore/core/ops/dropout_gen_mask.cc +++ b/mindspore/core/ops/dropout_gen_mask.cc @@ -41,11 +41,14 @@ ShapeVector CalDynamicOutputShape(const PrimitivePtr &primitive, const ValuePtrL if (indexed_value->isa()) { value = GetValue(indexed_value); } else { - MS_LOG(EXCEPTION) << "DropOutGenMask shape value must be int64, but " << indexed_value->ToString(); + MS_LOG(EXCEPTION) << "For '" << primitive->name() + << "', the type of shape value must be int64, but got: " << indexed_value->ToString() << "."; } if (value <= 0) { MS_LOG(EXCEPTION) << "DropOutGenMask product of value should be > 0"; + MS_LOG(EXCEPTION) << "For '" << primitive->name() + << "', product of value must be greater than 0, but got: " << value << "."; } if (std::numeric_limits::max() / count / value < 1) { @@ -76,12 +79,13 @@ ShapeVector CalOutputShape(const PrimitivePtr &primitive, const AbstractBasePtrL value = GetValue(value_track); } else { MS_LOG(EXCEPTION) << "For '" << primitive->name() - << "', input x_shape elements should be int64 or int32, but got " << value_track->ToString() + << "', input x_shape elements must be int64 or int32, but got: " << value_track->ToString() << "."; } if (value <= 0) { - MS_LOG(EXCEPTION) << "For '" << primitive->name() << "', product of value should be > 0, but got " << value; + MS_LOG(EXCEPTION) << "For '" << primitive->name() + << "', product of value must be greater than 0, but got: " << value << "."; } if (std::numeric_limits::max() / count / value < 1) { @@ -128,8 +132,8 @@ abstract::ShapePtr DropoutGenMaskInferShape(const PrimitivePtr &primitive, auto shape = shape_base->cast(); MS_EXCEPTION_IF_NULL(shape); if (shape->shape().size() != 1) { - MS_EXCEPTION(TypeError) << "For '" << op_name << "', Input 'shape' must be a 1-D Tensor, but got " - << shape->shape().size() << "."; + MS_EXCEPTION(TypeError) << "For '" << op_name + << "', input 'shape' must be a 1-D Tensor, but got: " << shape->shape().size() << "."; } size_t shape_rank = LongToSize(shape->shape()[0]); @@ -145,7 +149,7 @@ abstract::ShapePtr DropoutGenMaskInferShape(const PrimitivePtr &primitive, if (max_value.size() != shape_rank || min_value.size() != shape_rank) { MS_LOG(EXCEPTION) << "For '" << op_name - << "', The size of max_value and min_value should be equal to the shape rank, but got max_value's size:" + << "', the size of max_value and min_value must be equal to the shape rank, but got max_value's size: " << max_value.size() << ", min_value's size: " << min_value.size() << "."; } ShapeVector out_min_shape = CalDynamicOutputShape(primitive, min_value); diff --git a/mindspore/core/ops/dynamic_broadcast_gradient_args.cc b/mindspore/core/ops/dynamic_broadcast_gradient_args.cc index e0142a3ab2c..64a8c4a6b2d 100644 --- a/mindspore/core/ops/dynamic_broadcast_gradient_args.cc +++ b/mindspore/core/ops/dynamic_broadcast_gradient_args.cc @@ -36,6 +36,8 @@ int64_t CheckInputsAndGetShape(const AbstractBasePtr &input_arg, const string &p auto input_size = input_shape.size(); if (input_size != 1) { MS_EXCEPTION(TypeError) << "For " << prim_name << "', input must be 1-D, but dims is " << input_size; + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', input shape must be 1-D, but got: " << input_size + << 
"-D."; } if (input_shape[0] == abstract::Shape::SHP_ANY) { auto max_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_arg->BuildShape())[kMaxShape]; diff --git a/mindspore/core/ops/dynamic_broadcast_to.cc b/mindspore/core/ops/dynamic_broadcast_to.cc index 74b0a0e2ac2..6924f51993d 100644 --- a/mindspore/core/ops/dynamic_broadcast_to.cc +++ b/mindspore/core/ops/dynamic_broadcast_to.cc @@ -41,8 +41,8 @@ abstract::ShapePtr DynamicBroadcastToInferShape(const PrimitivePtr &primitive, y_shape = CheckAndConvertUtils::GetTensorInputShape(prim_name, input_args, 1); auto shape_value = y_shape->shape(); if (shape_value.size() != 1) { - MS_EXCEPTION(TypeError) << "For '" << prim_name << "', the shape size should be 1, but got " - << shape_value.size(); + MS_EXCEPTION(TypeError) << "For '" << prim_name << "', the shape size must be 1, but got: " << shape_value.size() + << "."; } std::vector output_shape; std::vector max_shape; @@ -58,12 +58,17 @@ abstract::ShapePtr DynamicBroadcastToInferShape(const PrimitivePtr &primitive, auto min_value = input_y->cast()->get_min_value(); auto max_value = input_y->cast()->get_max_value(); if (!min_value || !max_value) { - MS_EXCEPTION(ValueError) << "For BroadcastTo, inputs['shape'] min or max value is empty."; + MS_EXCEPTION(ValueError) + << "For 'BroadcastTo', inputs['shape'] min or max value can not be empty. But got min: " << min_value + << "max: " << max_value << "."; } min_shape = GetValue>(min_value); max_shape = GetValue>(max_value); if (min_shape.size() != out_dims || max_shape.size() != out_dims) { - MS_EXCEPTION(ValueError) << "For BroadcastTo, inputs['shape'] min or max value not match with out dims."; + MS_EXCEPTION(ValueError) << "For 'BroadcastTo', inputs['shape'] min or max must have the same size as output's" + ". 
But got min shape size: " + << min_shape.size() << ", max shape size: " << max_shape.size() + << ", output size: " << out_dims << "."; } } return std::make_shared(output_shape, min_shape, max_shape);
@@ -71,7 +76,7 @@ abstract::ShapePtr DynamicBroadcastToInferShape(const PrimitivePtr &primitive, auto out_shape = GetValue>(y_value); return std::make_shared(out_shape); } - MS_EXCEPTION(TypeError) << "For BroadcastTo, input args must be tensor or tuple."; + MS_EXCEPTION(TypeError) << "For 'BroadcastTo', input args must be tensor or tuple."; } TypePtr DynamicBroadcastToInferType(const PrimitivePtr &prim, const std::vector &input_args) {
diff --git a/mindspore/core/ops/dynamic_resize_nearest_neighbor.cc b/mindspore/core/ops/dynamic_resize_nearest_neighbor.cc
index fa3bb2c86f2..c829db6408d 100644
--- a/mindspore/core/ops/dynamic_resize_nearest_neighbor.cc
+++ b/mindspore/core/ops/dynamic_resize_nearest_neighbor.cc
@@ -57,12 +57,14 @@ abstract::ShapePtr DynamicResizeNearestNeighborInferShape(const PrimitivePtr &pr auto min_value = size->cast()->get_min_value(); auto max_value = size->cast()->get_max_value(); if (!min_value || !max_value) { - MS_EXCEPTION(ValueError) << "For ResizeNearestNeighbor, inputs['size'] min or max value is empty."; + MS_EXCEPTION(ValueError) << "For 'ResizeNearestNeighbor', inputs['size'] min or max value cannot be empty."; } min_size = GetValue>(min_value); max_size = GetValue>(max_value); if (min_size.size() != size_size || max_size.size() != size_size) { - MS_EXCEPTION(ValueError) << "For ResizeNearestNeighbor, inputs['size'] min or max value size is not 2."; + MS_EXCEPTION(ValueError) + << "For 'ResizeNearestNeighbor', inputs['size'] min and max value size must be 2, but got min: " + << min_size.size() << ", max: " << max_size.size() << "."; } } } else if (size->isa()) {
diff --git a/mindspore/core/ops/einsum.cc b/mindspore/core/ops/einsum.cc
index a3c6118735e..368aa068169 100644
--- a/mindspore/core/ops/einsum.cc
+++ b/mindspore/core/ops/einsum.cc
@@ -78,8 +78,8 @@ static void seg_left_equation(const std::string &left_equation, const std::strin } found_ell = false; } else { - MS_EXCEPTION(ValueError) << "For " << prim_name << ", Operand " << cur_element - << " in the equation contains invalid subscript, which can only consist of [a-zA-Z]."; + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', operand " << cur_element + << " in the equation can only contain [a-zA-Z], but got: " << cur_element << "."; } } if (cur_element != input_shapes.size() - 1) {
@@ -234,13 +234,13 @@ abstract::ShapePtr EinsumInferShape(const PrimitivePtr &primitive, const std::ve auto equation = GetValue(primitive->GetAttr(kEquation)); equation.erase(std::remove(equation.begin(), equation.end(), ' '), equation.end()); if (equation.length() == 0) { - MS_EXCEPTION(ValueError) << "For " << prim_name << ", the equation is required, but got none."; + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', the equation is required, but got none."; } const std::string seg_arrow = "->"; const auto seg_pos = equation.find(seg_arrow); if (seg_pos == 0) { - MS_EXCEPTION(ValueError) << "For " << prim_name - << ", the equation should contain characters to the left of the arrow, but got none."; + MS_EXCEPTION(ValueError) << "For '" << prim_name + << "', the equation should contain characters to the left of the arrow, but got none."; } (void)CheckAndConvertUtils::CheckInteger("input number", SizeToLong(input_args.size()), kGreaterEqual, 1, prim_name);
@@ -259,13 +259,13 @@ abstract::ShapePtr
EinsumInferShape(const PrimitivePtr &primitive, const std::ve auto shape = elements[idx]->BuildShape(); MS_EXCEPTION_IF_NULL(shape); if (shape->IsDimZero()) { - MS_EXCEPTION(ValueError) << "For " << prim_name << ", the dim of inputs' shape can not be zero, but got input[" << idx << "] shape: " << shape->ToString() << "."; + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', the dim of inputs' shape cannot be zero, but got input[" << idx << "] shape: " << shape->ToString() << "."; } auto &shape_vec = shape->cast()->shape(); for (auto &val : shape_vec) { if (val == 0) { - MS_EXCEPTION(ValueError) << "For " << prim_name << ", the shape can not contain zero, but got input[" << idx + MS_EXCEPTION(ValueError) << "For '" << prim_name << "', the shape cannot contain zero, but got input[" << idx << "] shape: " << shape->ToString() << "."; } }
diff --git a/mindspore/core/ops/fill_v2.cc b/mindspore/core/ops/fill_v2.cc
index 784682b0e2a..ddd01a8cbb8 100644
--- a/mindspore/core/ops/fill_v2.cc
+++ b/mindspore/core/ops/fill_v2.cc
@@ -41,11 +41,13 @@ abstract::ShapePtr FillV2InferShape(const PrimitivePtr &primitive, const std::ve int64_t max_length = GetValue(max_length_ptr); auto input1_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[0]->BuildShape())[kShape]; if (input1_shape.size() != 1) { - MS_EXCEPTION(ValueError) << "the shape size of the input1 must be equal to 1."; + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', the shape size of 'input1' must be 1, but got: " << input1_shape.size() << "."; } auto input2_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[1]->BuildShape())[kShape]; if (input2_shape.size() != 0) { - MS_EXCEPTION(ValueError) << "the shape size of the input2 must be equal to 0."; + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', the shape size of 'input2' must be 0, but got: " << input2_shape.size() << "."; } auto input_shape = input_args[0]->cast(); MS_EXCEPTION_IF_NULL(input_shape);
@@ -62,7 +64,8 @@ abstract::ShapePtr FillV2InferShape(const PrimitivePtr &primitive, const std::ve CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[0]->BuildShape())[kShape]); auto shape_v = shape_ptr->shape(); if (shape_v.size() != kInputDims) { - MS_EXCEPTION(ValueError) << "The input tensor must be a 1-D tensor."; + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', input must be a 1-D tensor, but got: " << shape_v.size() << "-D."; } if (!input_args[0]->BuildValue()->isa() && !input_args[0]->BuildValue()->isa()) { std::vector out_shape;
@@ -74,7 +77,8 @@ abstract::ShapePtr FillV2InferShape(const PrimitivePtr &primitive, const std::ve out_shape.push_back(input_shape_ptr[i]); shape_m *= input_shape_ptr[i]; } else { - MS_EXCEPTION(ValueError) << "Each dimension must be greater than 0."; + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', each dimension of input shape must be greater than 0."; } } } else if (input_type_element->type_id() == kNumberTypeInt64) {
@@ -84,16 +88,18 @@ abstract::ShapePtr FillV2InferShape(const PrimitivePtr &primitive, const std::ve out_shape.push_back(input_shape_ptr[i]); shape_m *= input_shape_ptr[i]; } else { - MS_EXCEPTION(ValueError) << "Each dimension must be greater than 0."; + MS_EXCEPTION(ValueError) << "For '" << primitive->name() + << "', each dimension of input shape must be greater than 0."; } } } else { - MS_EXCEPTION(TypeError) << "the datatype of the input1 not support, support datatype: int32, int64."; + MS_EXCEPTION(TypeError) << "For '" << primitive->name() << "', the dtype of input1 must be
diff --git a/mindspore/core/ops/floor_div.cc b/mindspore/core/ops/floor_div.cc
index 736f8c05070..dc61364eec4 100644
--- a/mindspore/core/ops/floor_div.cc
+++ b/mindspore/core/ops/floor_div.cc
@@ -58,7 +58,7 @@ TypePtr FloorDivInferType(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   if (!input_type01->isa<TensorType>() && !input_type02->isa<TensorType>()) {
     MS_EXCEPTION(TypeError) << "For " << prim_name << ","
                             << " one of the inputs must be tensor type but got " << input_type01->ToString() << " and "
-                            << input_type02->ToString();
+                            << input_type02->ToString() << ".";
   }
   const std::set<TypePtr> valid_types = {kFloat16, kFloat32, kFloat64, kInt8,   kInt16,
                                          kInt32,   kInt64,   kUInt8,   kUInt16, kBool};
diff --git a/mindspore/core/ops/floor_mod.cc b/mindspore/core/ops/floor_mod.cc
index bbc6727a9a9..9c0e65ff3b0 100644
--- a/mindspore/core/ops/floor_mod.cc
+++ b/mindspore/core/ops/floor_mod.cc
@@ -81,8 +81,8 @@ TypePtr FloorModInferType(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
       << type_x->ToString() << ", " << type_y->ToString() << "].";
   }
 }
diff --git a/mindspore/core/ops/fractional_max_pool.cc b/mindspore/core/ops/fractional_max_pool.cc
index 965eafc940c..4f9955f27f3 100644
--- a/mindspore/core/ops/fractional_max_pool.cc
+++ b/mindspore/core/ops/fractional_max_pool.cc
@@ -39,8 +39,9 @@ abstract::TupleShapePtr FractionalMaxPoolInferShape(const PrimitivePtr &primitive,
   (void)CheckAndConvertUtils::CheckInteger("input_rank", SizeToLong(in_shape.size()), kEqual, x_rank, op_name);
   for (int i = 0; i < x_rank; i++) {
     if (in_shape[i] <= 0) {
-      MS_EXCEPTION(ValueError) << "For '" << op_name << "', the shape of input 'x' must be > 0 "
-                               << std::to_string(in_shape[i]) << ".";
+      MS_EXCEPTION(ValueError) << "For '" << op_name
+                               << "', input shape must be greater than 0, but got: " << std::to_string(in_shape[i])
+                               << ".";
     }
   }
   auto pooling_ratio = GetValue<std::vector<float>>(primitive->GetAttr(kPoolingRatio));
@@ -54,20 +55,17 @@ abstract::TupleShapePtr FractionalMaxPoolInferShape(const PrimitivePtr &primitive,
                              << std::to_string(pooling_ratio[0]) << ".";
   }
   if (pooling_ratio[kInputIndex1] < 1.0) {
-    MS_EXCEPTION(ValueError)
-      << "For '" << op_name
-      << "', the element of parameter 'pooling_ratio' must be larger than 1.0, but pooling_ratio[1] = "
-      << std::to_string(pooling_ratio[kInputIndex1]) << ".";
+    MS_EXCEPTION(ValueError) << "For '" << op_name
+                             << "', the second element of 'pooling_ratio' must be greater than or equal to 1.0, "
+                             << "but got: " << std::to_string(pooling_ratio[kInputIndex1]) << ".";
   }
   if (pooling_ratio[kInputIndex2] < 1.0) {
-    MS_EXCEPTION(ValueError)
-      << "For '" << op_name
-      << "', the element of parameter 'pooling_ratio' must be larger than 1.0, but pooling_ratio[2] = "
-      << std::to_string(pooling_ratio[kInputIndex2]) << ".";
+    MS_EXCEPTION(ValueError) << "For '" << op_name
+                             << "', the third element of 'pooling_ratio' must be greater than or equal to 1.0, "
+                             << "but got: " << std::to_string(pooling_ratio[kInputIndex2]) << ".";
   }
   if (pooling_ratio[kInputIndex3] != 1.0) {
-    MS_EXCEPTION(ValueError) << "For '" << op_name
-                             << "', the last element of parameter 'pooling_ratio' must be 1.0, but got "
+    MS_EXCEPTION(ValueError) << "For '" << op_name << "', the fourth element of 'pooling_ratio' must be 1.0, but got: "
                              << std::to_string(pooling_ratio[kInputIndex3]) << ".";
   }
   std::vector<int64_t> out_shape(x_rank);
@@ -76,7 +74,8 @@ abstract::TupleShapePtr FractionalMaxPoolInferShape(const PrimitivePtr &primitive,
   }
   if (std::any_of(out_shape.begin(), out_shape.end(), [](int64_t a) { return a <= 0; })) {
     MS_EXCEPTION(ValueError) << "For '" << op_name
-                             << "', output shape <=0, the value of parameter 'pooling_ratio' is not valid.";
+                             << "', out shape must be greater than 0, but got out_shape: " << out_shape
+                             << ". Check if the pooling ratio is valid.";
   }
   int64_t row = out_shape[kInputIndex1] + 1;
   int64_t col = out_shape[kInputIndex2] + 1;
diff --git a/mindspore/core/ops/gather.cc b/mindspore/core/ops/gather.cc
index d6a1c0aa8d1..c7030496c7c 100644
--- a/mindspore/core/ops/gather.cc
+++ b/mindspore/core/ops/gather.cc
@@ -65,7 +65,7 @@ AbstractBasePtr GatherInfer(const abstract::AnalysisEnginePtr &, const Primitive
   } else {
     MS_LOG(EXCEPTION) << "For '" << primitive->name()
                       << "', the third input type should be tensor or scalar, but got invalid abstract type:"
-                      << input_args[kInputIndex2]->type_name();
+                      << input_args[kInputIndex2]->type_name() << ".";
   }
   auto params_shp = params->shape()->shape();
   auto indices_shp = indices->shape()->shape();
@@ -78,8 +78,8 @@ AbstractBasePtr GatherInfer(const abstract::AnalysisEnginePtr &, const Primitive
   ShapeVector indices_shp_max = (ind_dyn) ? indices->shape()->max_shape() : indices->shape()->shape();
   // check axis_val within interval: [-params_rank, params_rank)
   if (!(-params_rank <= axis_val) || !(axis_val < params_rank)) {
-    MS_LOG(EXCEPTION) << "For Gather - Axis value must be within [ " << -params_rank << ", " << params_rank << " ) "
-                      << "Got " << axis_val << ".";
+    MS_LOG(EXCEPTION) << "For 'Gather', axis value must be within range [" << -params_rank << ", " << params_rank
+                      << "), but got: " << axis_val << ".";
   }
   if (axis_val < 0) {
     axis_val += params_rank;
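The Gather hunk also pins down the axis contract: a valid axis lies in the half-open interval [-params_rank, params_rank), and a negative axis is normalized by adding params_rank. A minimal standalone sketch of that check, with NormalizeAxis as a hypothetical helper rather than MindSpore code:

#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <string>

// Hypothetical helper showing the axis contract enforced in gather.cc:
// a valid axis lies in [-rank, rank); negative values wrap around.
int64_t NormalizeAxis(int64_t axis, int64_t rank) {
  if (!(-rank <= axis) || !(axis < rank)) {
    throw std::out_of_range("For 'Gather', axis value must be within range [" + std::to_string(-rank) + ", " +
                            std::to_string(rank) + "), but got: " + std::to_string(axis) + ".");
  }
  return axis < 0 ? axis + rank : axis;
}

int main() {
  std::cout << NormalizeAxis(-1, 3) << std::endl;  // prints 2
  return 0;
}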
diff --git a/mindspore/core/ops/gather_d.cc b/mindspore/core/ops/gather_d.cc
index 2468859dcfd..387cca5fed9 100644
--- a/mindspore/core/ops/gather_d.cc
+++ b/mindspore/core/ops/gather_d.cc
@@ -45,7 +45,7 @@ abstract::ShapePtr GatherDInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   }
   for (size_t i = 0; i < x_shape.size(); ++i) {
     if (SizeToLong(i) == dim_v) continue;
-    MS_LOG(INFO) << "For '" << prim_name << "', it's now being check " << i << "th x shape.";
+    MS_LOG(INFO) << "For '" << prim_name << "', it's now checking the " << i << "th x shape.";
     CheckAndConvertUtils::Check("x shape", x_shape[i], kEqual, index_shape[i], prim_name);
   }
   return std::make_shared<abstract::Shape>(index_shape);
diff --git a/mindspore/core/ops/grid_sampler_3d.cc b/mindspore/core/ops/grid_sampler_3d.cc
index ffdeaaa1d3a..1db2901a51d 100644
--- a/mindspore/core/ops/grid_sampler_3d.cc
+++ b/mindspore/core/ops/grid_sampler_3d.cc
@@ -29,23 +29,23 @@ abstract::ShapePtr GridSampler3DInferShape(const PrimitivePtr &primitive,
   auto grid_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[kInputIndex1]->BuildShape())[kShape];
   const size_t kFive = 5;
   if (input_x_shape.size() != kFive) {
-    MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', Input_x must be a 5-dimensional tensor, but got "
-                             << std::to_string(input_x_shape.size()) << "-dimensional tensor.";
+    MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', 'input_x' must be a 5-D tensor, but got "
+                             << std::to_string(input_x_shape.size()) << "-D tensor.";
   }
   if (grid_shape.size() != kFive) {
-    MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', Grid must be a 5-dimensional tensor, but got "
-                             << std::to_string(grid_shape.size()) << "-dimensional tensor.";
+    MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', 'grid' must be a 5-D tensor, but got "
+                             << std::to_string(grid_shape.size()) << "-D tensor.";
   }
   if (input_x_shape[kInputIndex0] != grid_shape[kInputIndex0]) {
     MS_EXCEPTION(ValueError) << "For '" << primitive->name()
-                             << "', The first dimension of 'grid' and 'input_x' must be equal, but got the shape of 'grid' is "
+                             << "', the first dimension of 'grid' and 'input_x' must be equal, but got the shape of 'grid' is "
                              << input_args[kInputIndex1]->BuildShape()->ToString() << " , and the shape of 'input_x' is "
                              << input_args[kInputIndex0]->BuildShape()->ToString() << ".";
   }
   if (grid_shape[kInputIndex4] != kInputIndex3) {
-    MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', The last dimension of grid must be 3, but got "
-                             << std::to_string(grid_shape[kInputIndex4]);
+    MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', the last dimension of 'grid' must be 3, but got "
+                             << std::to_string(grid_shape[kInputIndex4]) << ".";
   }
   std::vector<int64_t> output_shape = {input_x_shape[kInputIndex0], input_x_shape[kInputIndex1],
                                        grid_shape[kInputIndex1], grid_shape[kInputIndex2], grid_shape[kInputIndex3]};
diff --git a/mindspore/core/ops/iou.cc b/mindspore/core/ops/iou.cc
index 3234a98056a..55ee4f65bd2 100644
--- a/mindspore/core/ops/iou.cc
+++ b/mindspore/core/ops/iou.cc
@@ -39,8 +39,9 @@ abstract::ShapePtr IOUInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   auto x_shp = x_shape_map[kShape];
   auto y_shp = y_shape_map[kShape];
   if (x_shp.size() != 2 || y_shp.size() != 2) {
-    MS_EXCEPTION(ValueError) << "For BatchMatMul, input x, y should have the same dimension size and should be greater"
-                             << "or equal to 3, while x size = " << x_shp.size() << ", y size = " << y_shp.size();
+    MS_EXCEPTION(ValueError) << "For '" << prim_name << "', input x, y must both be 2-dimensional, but got x dims: "
+                             << x_shp.size() << ", y dims: " << y_shp.size() << ".";
   }
   (void)CheckAndConvertUtils::CheckInteger("input numbers", SizeToLong(x_shp[1]), kGreaterEqual, 4, prim_name);
   (void)CheckAndConvertUtils::CheckInteger("input numbers", SizeToLong(y_shp[1]), kGreaterEqual, 4, prim_name);
diff --git a/mindspore/core/ops/is_close.cc b/mindspore/core/ops/is_close.cc
index 145bc6a5a9f..24c22e474c3 100644
--- a/mindspore/core/ops/is_close.cc
+++ b/mindspore/core/ops/is_close.cc
@@ -48,19 +48,19 @@ abstract::ShapePtr IsCloseInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
     other_size *= other_shape[i];
     if (input_shape[i] != other_shape[i] && (input_shape[i] != 1 || other_shape[i] != 1)) {
       MS_EXCEPTION(ValueError) << "For '" << op_name
-                               << "', The size of tensor input must match the size of tensor other at the " << i
-                               << " dimension, but got input size: " << input_shape[i]
-                               << ", other size: " << other_shape[i] << ".";
+                               << "', the size of tensor 'input' must match the size of tensor 'other' at the " << i
+                               << "th dimension, but got 'input' size: " << input_shape[i]
+                               << ", 'other' size: " << other_shape[i] << ".";
     }
   }
   if (input_size > MAX)
     MS_EXCEPTION(ValueError) << "For '" << op_name
-                             << "', The size of tensor input must should be less than [2147483648], actual is "
-                             << input_size;
+                             << "', the size of tensor 'input' must be less than [2147483648], but got: "
+                             << input_size << ".";
   if (other_size > MAX)
     MS_EXCEPTION(ValueError) << "For '" << op_name
-                             << "', The size of tensor other must should be less than [2147483648], actual is "
-                             << other_size;
+                             << "', the size of tensor 'other' must be less than [2147483648], but got: "
+                             << other_size << ".";
   }
   return BroadCastInferShape(op_name, input_args);
 }
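IsClose only pre-checks per-dimension compatibility and then delegates the output shape to BroadCastInferShape; two sizes are compatible at a dimension when they are equal or either is 1. A rough standalone sketch of that broadcasting rule, with BroadcastShape as a hypothetical stand-in for the MindSpore implementation:

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <utility>
#include <vector>

// Hypothetical sketch of the broadcast rule IsClose relies on: align shapes
// from the trailing dimension; sizes are compatible if equal or either is 1.
std::vector<int64_t> BroadcastShape(std::vector<int64_t> x, std::vector<int64_t> y) {
  if (x.size() < y.size()) std::swap(x, y);     // make x the longer shape
  y.insert(y.begin(), x.size() - y.size(), 1);  // left-pad y with 1s
  std::vector<int64_t> out(x.size());
  for (size_t i = 0; i < x.size(); ++i) {
    if (x[i] != y[i] && x[i] != 1 && y[i] != 1) {
      throw std::invalid_argument("shapes are not broadcastable");
    }
    out[i] = std::max(x[i], y[i]);
  }
  return out;
}

int main() {
  auto out = BroadcastShape({4, 1, 3}, {2, 3});  // -> {4, 2, 3}
  for (auto d : out) std::cout << d << ' ';
  std::cout << std::endl;
  return 0;
}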
diff --git a/mindspore/core/ops/lars_v2_update.cc b/mindspore/core/ops/lars_v2_update.cc
index daa9f49d202..c41625c09c7 100644
--- a/mindspore/core/ops/lars_v2_update.cc
+++ b/mindspore/core/ops/lars_v2_update.cc
@@ -37,28 +37,31 @@ abstract::ShapePtr LARSUpdateInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   if (weight_shape[kShape].size() != gradient_shape[kShape].size()) {
     MS_EXCEPTION(ValueError) << "For '" << op_name
-                             << "', weight shape size should be equal to gradient shape size, but got "
-                             << "weight shape: " << weight_shape << " and gradient shape: " << gradient_shape;
+                             << "', weight shape size must be equal to gradient shape size, but got "
+                             << "weight shape size: " << weight_shape[kShape].size()
+                             << ", gradient shape size: " << gradient_shape[kShape].size() << ".";
   }
   if (norm_weight_shape[kShape].size() != norm_gradient_shape[kShape].size()) {
     MS_EXCEPTION(ValueError) << "For " << op_name
-                             << "', norm weight shape size should be equal to norm gradient shape size, but got "
-                             << "weight shape: " << norm_weight_shape << " and gradient shape: " << norm_gradient_shape;
+                             << "', norm weight shape size must be equal to norm gradient shape size, but got "
+                             << "norm weight shape size: " << norm_weight_shape[kShape].size()
+                             << ", norm gradient shape size: " << norm_gradient_shape[kShape].size() << ".";
   }
   for (size_t index = 0; index < weight_shape[kShape].size(); index++) {
     if (weight_shape[kShape][index] != gradient_shape[kShape][index]) {
-      MS_EXCEPTION(ValueError) << "For '" << op_name << "', The " << index
-                               << "'s shape of weight shape should euqal with gradient shape, but got "
-                               << "weight shape: " << norm_weight_shape
-                               << " and gradient shape:" << norm_gradient_shape;
+      MS_EXCEPTION(ValueError) << "For '" << op_name << "', the " << index
+                               << "th dim of weight shape and gradient shape must be equal, but got "
+                               << "weight shape[" << index << "]: " << weight_shape[kShape][index]
+                               << ", gradient shape[" << index << "]: " << gradient_shape[kShape][index] << ".";
     }
   }
-  for (size_t index = 0; index < weight_shape[kShape].size(); index++) {
-    if (weight_shape[kShape][index] != gradient_shape[kShape][index]) {
-      MS_EXCEPTION(ValueError) << "For '" << op_name << "', The " << index
-                               << "'s shape of weight shape should euqal with gradient shape, but got "
-                               << "weight shape: " << norm_weight_shape
-                               << " and gradient shape:" << norm_gradient_shape;
+  for (size_t index = 0; index < norm_weight_shape[kShape].size(); index++) {
+    if (norm_weight_shape[kShape][index] != norm_gradient_shape[kShape][index]) {
+      MS_EXCEPTION(ValueError) << "For '" << op_name << "', the " << index
+                               << "th dim of norm weight shape and norm gradient shape must be equal, but got "
+                               << "norm weight shape[" << index << "]: " << norm_weight_shape[kShape][index]
+                               << ", norm gradient shape[" << index << "]: " << norm_gradient_shape[kShape][index]
+                               << ".";
     }
   }
   auto shp_len = weight_decay_shape[kShape].size();
diff --git a/mindspore/core/ops/layer_norm.cc b/mindspore/core/ops/layer_norm.cc
index 06d43153f08..fb71862de7d 100644
--- a/mindspore/core/ops/layer_norm.cc
+++ b/mindspore/core/ops/layer_norm.cc
@@ -58,7 +58,7 @@ AbstractBasePtr LayerNormInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
   auto const &input_shape_list = input_shape->shape();
   const size_t input_rank = input_shape_list.size();
   if (input_rank == 0) {
-    MS_LOG(EXCEPTION) << "For '" << op_name << "', input_rank should not be zero, but got " << input_rank;
+    MS_LOG(EXCEPTION) << "For '" << op_name << "', input_rank can not be zero, but got: " << input_rank << ".";
   }
 
   // begin_norm_axis and begin_params_axis should be smaller than the size of input_x and >= -1
@@ -86,7 +86,7 @@ AbstractBasePtr LayerNormInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
   auto const &gamma_shape_list = gamma_shape->shape();
   auto const &beta_shape_list = beta_shape->shape();
   if (gamma_shape_list.empty() || beta_shape_list.empty()) {
-    MS_LOG(EXCEPTION) << "LayerNorm evaluator gamma or beta is a AbstractScalar that is not support.";
+    MS_LOG(EXCEPTION) << "For 'LayerNorm', evaluator gamma or beta can not be an AbstractScalar.";
   }
 
   size_t begin_params_axis_u = LongToSize(begin_params_axis);
@@ -106,9 +106,9 @@ AbstractBasePtr LayerNormInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
     if ((gamma_shape_list[gamma_beta_shape_dim] != input_shape_list[i]) ||
         (beta_shape_list[gamma_beta_shape_dim] != input_shape_list[i])) {
       MS_LOG(EXCEPTION) << "For '" << op_name
-                        << "', Gamma or beta shape should match input shape, but got input shape: "
+                        << "', gamma or beta shape must match input shape, but got input shape: "
                         << input_shape->ToString() << ", gamma shape: " << gamma_shape->ToString()
-                        << ", beta shape: " << beta_shape->ToString();
+                        << ", beta shape: " << beta_shape->ToString() << ".";
     }
   }
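For context on the reworded LayerNorm check: gamma and beta have to match the tail of the input shape starting at begin_params_axis. A minimal standalone sketch of that constraint, assuming a hypothetical CheckGammaBeta rather than the actual MindSpore code:

#include <cstdint>
#include <stdexcept>
#include <vector>

// Hypothetical sketch of the LayerNorm constraint above: gamma and beta must
// match input_shape[begin_params_axis:] element by element.
void CheckGammaBeta(const std::vector<int64_t> &input_shape, size_t begin_params_axis,
                    const std::vector<int64_t> &gamma, const std::vector<int64_t> &beta) {
  size_t tail = input_shape.size() - begin_params_axis;
  if (gamma.size() < tail || beta.size() < tail) {
    throw std::invalid_argument("For 'LayerNorm', gamma and beta must cover the normalized dimensions.");
  }
  for (size_t i = begin_params_axis; i < input_shape.size(); ++i) {
    size_t dim = i - begin_params_axis;
    if (gamma[dim] != input_shape[i] || beta[dim] != input_shape[i]) {
      throw std::invalid_argument("For 'LayerNorm', gamma or beta shape must match input shape.");
    }
  }
}

int main() {
  CheckGammaBeta({8, 16, 32}, 1, {16, 32}, {16, 32});  // passes: tail of {8, 16, 32} is {16, 32}
  return 0;
}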
diff --git a/mindspore/core/ops/lower_bound.cc b/mindspore/core/ops/lower_bound.cc
index b35465d70ca..58dd2ae17ce 100644
--- a/mindspore/core/ops/lower_bound.cc
+++ b/mindspore/core/ops/lower_bound.cc
@@ -29,16 +29,16 @@ abstract::ShapePtr LowerBoundInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   size_t size_exp = 2;
   if (x_shape.size() != size_exp) {
     MS_EXCEPTION(ValueError) << "For '" << primitive->name()
-                             << "', The rank of sorted_x need to be equal to 2, but got " << values_shape.size();
+                             << "', the rank of sorted_x must be 2, but got: " << x_shape.size() << ".";
   }
   if (values_shape.size() != size_exp) {
-    MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', The rank of values need to be equal to 2, but got "
-                             << values_shape.size();
+    MS_EXCEPTION(ValueError) << "For '" << primitive->name()
+                             << "', the rank of values must be 2, but got: " << values_shape.size() << ".";
   }
   if (x_shape[0] != values_shape[0]) {
     MS_EXCEPTION(ValueError) << "For '" << primitive->name()
-                             << "', The first dimension of shape of sorted_x must be equal to that of values, but got shape of values: "
+                             << "', the first dimension of the shape of sorted_x must be equal to that of values, but got shape of values: "
                              << input_args[1]->BuildShape()->ToString() << ", shape of sorted_x:"
                              << input_args[0]->BuildShape()->ToString() << ".";
   }
@@ -58,8 +58,8 @@ TypePtr LowerBoundInferType(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   auto out_type_id = out_type->type_id();
   MS_EXCEPTION_IF_NULL(out_type);
   if (out_type_id != kInt32->type_id() && out_type_id != kInt64->type_id()) {
-    MS_EXCEPTION(TypeError) << "For '" << primitive->name() << "', 'out_type' must be int32 or int64, but got "
-                            << out_type;
+    MS_EXCEPTION(TypeError) << "For '" << primitive->name()
+                            << "', 'out_type' must be int32 or int64, but got: " << out_type << ".";
   }
   return out_type;
 }
diff --git a/mindspore/core/ops/lstsq.cc b/mindspore/core/ops/lstsq.cc
index b5b5bb81b00..02cc699d1ce 100644
--- a/mindspore/core/ops/lstsq.cc
+++ b/mindspore/core/ops/lstsq.cc
@@ -38,16 +38,17 @@ abstract::ShapePtr LstsqInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   auto a_shape_map = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[1]->BuildShape());
   auto a_shape = a_shape_map[kShape];
   if (x_shape.size() != x_dim_num) {
-    MS_EXCEPTION(ValueError) << "For lstsq, the dimension of x must be equal to 2, while got x_dim: " << x_shape.size()
+    MS_EXCEPTION(ValueError) << "For 'Lstsq', the dimension of 'x' must be equal to 2, but got x_dim: " << x_shape.size()
                              << ".";
   }
   if (a_shape.size() != a_dim_num_2 && a_shape.size() != a_dim_num_1) {
-    MS_EXCEPTION(ValueError) << "For lstsq, the dimension of a must be equal to 2 or 1, while got a_dim: "
+    MS_EXCEPTION(ValueError) << "For 'Lstsq', the dimension of 'a' must be equal to 2 or 1, but got a_dim: "
                              << a_shape.size() << ".";
   }
   if (x_shape[0] != a_shape[0]) {
-    MS_EXCEPTION(ValueError) << "For lstsq, the length of x_dim[0]: " << x_shape[0]
-                             << " is not equal to the length of a_dims[0]: " << a_shape[0] << ".";
+    MS_EXCEPTION(ValueError)
+      << "For 'Lstsq', the length of x_dim[0] must be equal to the length of a_dims[0]. But got x_dim[0]: "
+      << x_shape[0] << ", a_dims[0]: " << a_shape[0] << ".";
   }
   ShapeVector y_shape;
   if (a_shape.size() == a_dim_num_1) {
diff --git a/mindspore/core/ops/lu_solve_.cc b/mindspore/core/ops/lu_solve_.cc
index 9b32cc018ac..b2f62a32d9e 100644
--- a/mindspore/core/ops/lu_solve_.cc
+++ b/mindspore/core/ops/lu_solve_.cc
@@ -60,23 +60,29 @@ abstract::ShapePtr LuSolveInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   auto lu_pivots_shape_map = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[2]->BuildShape());
   auto lu_pivots_shape = lu_pivots_shape_map[kShape];
   if (lu_data_shape.size() < kDimNum) {
-    MS_EXCEPTION(ValueError) << "For " << op_name << " lu_data's dimensions should be greater than or equal to 2.";
+    MS_EXCEPTION(ValueError) << "For '" << op_name
+                             << "', lu_data's dimension must be greater than or equal to 2, but got: "
+                             << lu_data_shape.size() << ".";
   }
   if (x_shape.size() < kDimNum) {
-    MS_EXCEPTION(ValueError) << "For " << op_name << " x's dimensions should be greater than or equal to 2.";
+    MS_EXCEPTION(ValueError) << "For '" << op_name
+                             << "', x's dimension must be greater than or equal to 2, but got: " << x_shape.size()
+                             << ".";
   }
   if (lu_pivots_shape.size() < 1) {
-    MS_EXCEPTION(ValueError) << "For " << op_name << " lu_pivots's dimensions should be greater than or equal to 1.";
+    MS_EXCEPTION(ValueError) << "For '" << op_name
+                             << "', lu_pivots's dimension must be greater than or equal to 1, but got: "
+                             << lu_pivots_shape.size() << ".";
   }
   if (lu_data_shape[lu_data_shape.size() - 1] != lu_data_shape[lu_data_shape.size() - kDimNum]) {
-    MS_EXCEPTION(ValueError) << "For " << op_name << " input lu_data should be square matrix "
-                             << "while row is " << lu_data_shape[lu_data_shape.size() - kDimNum] << ", col is "
-                             << lu_data_shape[lu_data_shape.size() - 1] << ".";
+    MS_EXCEPTION(ValueError) << "For '" << op_name << "', input lu_data must be a square matrix, "
+                             << "but got row: " << lu_data_shape[lu_data_shape.size() - kDimNum]
+                             << ", col: " << lu_data_shape[lu_data_shape.size() - 1] << ".";
   }
   if (x_shape[x_shape.size() - kDimNum] != lu_data_shape[lu_data_shape.size() - kDimNum]) {
-    MS_EXCEPTION(ValueError) << "For " << op_name << " x's col rank is not same as lu_data's col rank. "
-                             << "x is " << x_shape[x_shape.size() - kDimNum] << ", lu_data is "
-                             << lu_data_shape[lu_data_shape.size() - kDimNum] << ".";
+    MS_EXCEPTION(ValueError) << "For '" << op_name << "', x's col rank must be the same as lu_data's col rank, "
+                             << "but got x's: " << x_shape[x_shape.size() - kDimNum]
+                             << ", lu_data's: " << lu_data_shape[lu_data_shape.size() - kDimNum] << ".";
   }
   if (x_shape.size() == lu_data_shape.size()) {
     for (size_t i = 0; i <= x_shape.size() - kDimNum; i++) {
@@ -101,10 +107,10 @@ abstract::ShapePtr LuSolveInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
     }
   }
   if (lu_pivots_shape[lu_pivots_shape.size() - 1] != lu_data_shape[lu_data_shape.size() - 1]) {
-    MS_EXCEPTION(ValueError) << "For " << op_name
-                             << " the last dimension of lu_pivots must be equal to the last dimension of lu_data, "
-                             << "lu_data is " << lu_data_shape[lu_data_shape.size() - 1] << ", lu_pivots is "
-                             << lu_pivots_shape[lu_pivots_shape.size() - 1] << ".";
+    MS_EXCEPTION(ValueError) << "For '" << op_name
+                             << "', the last dimension of lu_pivots must be the same as lu_data's, "
+                             << "but got lu_pivots': " << lu_pivots_shape[lu_pivots_shape.size() - 1]
+                             << ", lu_data's: " << lu_data_shape[lu_data_shape.size() - 1] << ".";
   }
   for (size_t i = 0; i < lu_pivots_shape.size(); i++) {
     if (lu_data_shape[i] != lu_pivots_shape[i]) {
diff --git a/mindspore/core/ops/masked_fill.cc b/mindspore/core/ops/masked_fill.cc
index d05ad5b806a..730296d8807 100644
--- a/mindspore/core/ops/masked_fill.cc
+++ b/mindspore/core/ops/masked_fill.cc
@@ -40,8 +40,8 @@ abstract::ShapePtr MaskedFillInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   if (input_args[kInputIndex2]->isa<abstract::AbstractTensor>()) {
     if (value_shape.size() != 0) {
       MS_EXCEPTION(ValueError)
-        << "For " + op_name +
-             ", 'value' only supports a 0-dimensional value tensor or a float number, but got tensor with "
+        << "For '" << op_name
+        << "', 'value' only supports a 0-dimensional value tensor or a float number, but got tensor with "
         << value_shape.size() << " dimension(s).";
     }
     broadcast_shape = CalBroadCastShape(broadcast_shape, value_shape, op_name);
diff --git a/mindspore/core/ops/maximum.cc b/mindspore/core/ops/maximum.cc
index c043d2bff1a..b8c1021cee6 100644
--- a/mindspore/core/ops/maximum.cc
+++ b/mindspore/core/ops/maximum.cc
@@ -68,8 +68,8 @@ TypePtr MaximumInferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
       << type_x->ToString() << ", " << type_y->ToString() << "].";
   }
 }
diff --git a/mindspore/core/ops/mod.cc b/mindspore/core/ops/mod.cc
index 6c42cb336b9..2f552d22013 100644
--- a/mindspore/core/ops/mod.cc
+++ b/mindspore/core/ops/mod.cc
@@ -69,8 +69,8 @@ TypePtr ModInferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
       << type_x->ToString() << ", " << type_y->ToString() << "].";
   }
 }
diff --git a/mindspore/core/ops/mulnonan.cc b/mindspore/core/ops/mulnonan.cc
index 9ad5cf5111a..b1e3510fe01 100644
--- a/mindspore/core/ops/mulnonan.cc
+++ b/mindspore/core/ops/mulnonan.cc
@@ -64,9 +64,9 @@ TypePtr MulNoNanInferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
-      << "For '" << prim->name()
-      << "', Complex math binary op expecting Tensor [complex64, complex64],[complex64, float32], [float32, "
-         "complex64],[complex128, complex128],[complex128, float64], [float64, complex128], but got["
+      << "For '" << op_name
+      << "', complex math binary op expecting Tensor [complex64, complex64], [complex64, float32], [float32, "
+         "complex64], [complex128, complex128], [complex128, float64] or [float64, complex128], but got ["
       << type_x->ToString() << ", " << type_y->ToString() << "].";
   }
 }
diff --git a/mindspore/core/ops/neg.cc b/mindspore/core/ops/neg.cc
index c1d9c380363..26c1fd6457f 100644
--- a/mindspore/core/ops/neg.cc
+++ b/mindspore/core/ops/neg.cc
@@ -132,7 +132,7 @@ ValuePtr NegInferValue(const PrimitivePtr &prim, const std::vector<ValuePtr> &input_args) {
                  << prim->name() << "', the supported data type is ['int8', 'int16', 'int32', 'int64', 'uint8', "
                     "'uint16','uint32', 'uint64','float16', 'float32', 'float64', 'complex64', 'complex128'], but got "
-                 << x_tensor->ToString();
+                 << x_tensor->ToString() << ".";
     }
   }
   return result_tensor;
diff --git a/mindspore/core/ops/neighborexchange.cc b/mindspore/core/ops/neighborexchange.cc
index d5beb31407d..d99a65d0a44 100644
--- a/mindspore/core/ops/neighborexchange.cc
+++ b/mindspore/core/ops/neighborexchange.cc
@@ -47,38 +47,38 @@ void CheckAttr(const PrimitivePtr &primitive, const std::string &shape_attr_name
   try {
     auto attr = primitive->GetAttr(shape_attr_name);
     if (attr->cast<ValueTuplePtr>() == nullptr) {
-      MS_EXCEPTION(TypeError) << "For '" << prim_name << "', Attr '" << shape_attr_name
+      MS_EXCEPTION(TypeError) << "For '" << prim_name << "', attr '" << shape_attr_name
                               << "' is necessary, but missing it.";
     }
     attr_shapes = GetValue<std::vector<ValuePtr>>(attr);
   } catch (const std::exception &) {
-    MS_EXCEPTION(TypeError) << "For '" << prim_name << "', Attr " << shape_attr_name
-                            << " must be a tuple(list, list, ...).";
+    MS_EXCEPTION(TypeError) << "For '" << prim_name << "', attr '" << shape_attr_name
+                            << "' must be a tuple(list, list, ...).";
   }
   if (!attr_shapes.empty()) {
     auto ele = attr_shapes[0]->cast<ValueListPtr>();
     if (ele == nullptr) {
-      MS_EXCEPTION(TypeError) << "For '" << prim_name << "', Attr " << shape_attr_name
-                              << " must be a tuple(list, list, ...).";
+      MS_EXCEPTION(TypeError) << "For '" << prim_name << "', first element of attr '" << shape_attr_name
+                              << "' can not be an empty pointer.";
     }
   }
   std::vector<int64_t> attr_rank_ids;
   try {
     auto attr = primitive->GetAttr(rank_ids_attr_name);
     if (attr->cast<ValueTuplePtr>() != nullptr) {
-      MS_EXCEPTION(TypeError) << "For '" << prim_name << "', Attr '" << shape_attr_name
+      MS_EXCEPTION(TypeError) << "For '" << prim_name << "', attr '" << rank_ids_attr_name
                               << "' is necessary, but missing it";
     }
     attr_rank_ids = GetValue<std::vector<int64_t>>(attr);
   } catch (const std::exception &) {
-    MS_EXCEPTION(TypeError) << "For '" << prim_name << "', Attr " << rank_ids_attr_name
-                            << " must be a list[int, int, ...].";
+    MS_EXCEPTION(TypeError) << "For '" << prim_name << "', attr '" << rank_ids_attr_name
+                            << "' must be a list[int, int, ...].";
   }
   if (attr_shapes.size() != attr_rank_ids.size()) {
     MS_EXCEPTION(ValueError) << "For '" << primitive->name() << "', attr '" << shape_attr_name
                              << "' size must be equal to attr '" << rank_ids_attr_name << "' size, but got attr '"
-                             << shape_attr_name << "' size: " << attr_shapes.size() << ", attr " << rank_ids_attr_name
-                             << " size: " << attr_rank_ids.size();
+                             << shape_attr_name << "' size: " << attr_shapes.size() << ", attr '" << rank_ids_attr_name
+                             << "' size: " << attr_rank_ids.size() << ".";
   }
 }
@@ -95,8 +95,8 @@ void NeighborExchangeCheck(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   auto recv_type_attr = primitive->GetAttr(kNeighborExchangeRecvType);
   MS_EXCEPTION_IF_NULL(recv_type_attr);
   if (!recv_type_attr->isa<Type>()) {
-    MS_EXCEPTION(TypeError) << "For '" << prim_name << "', Attr " << kNeighborExchangeRecvType
-                            << " should be a mindspore data type.";
+    MS_EXCEPTION(TypeError) << "For '" << prim_name << "', attr '" << kNeighborExchangeRecvType
+                            << "' must be a mindspore data type.";
   }
   // check group
   auto group_attr = primitive->GetAttr(kGroup);
@@ -104,7 +104,7 @@ void NeighborExchangeCheck(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
     (void)GetValue<std::string>(group_attr);
   } catch (const std::exception &) {
-    MS_EXCEPTION(TypeError) << "For '" << prim_name << "', Attr " << kGroup << " should be a str.";
+    MS_EXCEPTION(TypeError) << "For '" << prim_name << "', attr '" << kGroup << "' must be a str.";
   }
   // check empty input
   auto send_rank_ids = GetValue<std::vector<int64_t>>(primitive->GetAttr(kSendRankIds));
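The CheckAttr hunk above follows one more recurring pattern: a required attribute is fetched, type-checked, and reported in the same "For '<op>', attr '<name>' ..." wording. A standalone sketch of that validation flow, with CheckShapesAttr as a hypothetical analogue of the MindSpore helper rather than its actual API:

#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

// Standalone sketch (not MindSpore API) of the CheckAttr pattern: validate
// that a required attribute exists and has the expected nested shape,
// reporting failures in the "For '<op>', attr '<name>' ..." convention.
void CheckShapesAttr(const std::string &op, const std::string &attr_name,
                     const std::vector<std::vector<int64_t>> *attr) {
  if (attr == nullptr) {
    throw std::invalid_argument("For '" + op + "', attr '" + attr_name + "' is necessary, but missing it.");
  }
  for (const auto &shape : *attr) {
    if (shape.empty()) {
      throw std::invalid_argument("For '" + op + "', attr '" + attr_name + "' must be a tuple(list, list, ...).");
    }
  }
}

int main() {
  std::vector<std::vector<int64_t>> recv_shapes = {{32, 64}, {32, 64}};
  CheckShapesAttr("NeighborExchange", "recv_shapes", &recv_shapes);  // passes
  std::cout << "attr check passed" << std::endl;
  return 0;
}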