From ad76e1c42a4d73f65f906f95a2444c41e5ef5969 Mon Sep 17 00:00:00 2001 From: leonwanghui Date: Sat, 4 Apr 2020 13:00:18 +0800 Subject: [PATCH] Fix some typo errors in pipeline module Signed-off-by: leonwanghui --- mindspore/ccsrc/ir/func_graph.cc | 12 ++-- mindspore/ccsrc/ir/func_graph.h | 4 +- mindspore/ccsrc/pipeline/action.cc | 2 +- mindspore/ccsrc/pipeline/base.h | 2 +- .../ccsrc/pipeline/parse/data_converter.cc | 21 +++--- .../ccsrc/pipeline/parse/function_block.cc | 4 +- mindspore/ccsrc/pipeline/parse/parse.cc | 50 ++++++------- mindspore/ccsrc/pipeline/parse/parse.h | 6 +- mindspore/ccsrc/pipeline/parse/parse_base.h | 4 +- .../ccsrc/pipeline/parse/python_adapter.cc | 2 + mindspore/ccsrc/pipeline/parse/resolve.cc | 6 +- mindspore/ccsrc/pipeline/pass.cc | 4 +- mindspore/ccsrc/pipeline/pipeline.cc | 70 +++++++++---------- mindspore/ccsrc/pipeline/pipeline_ge.cc | 16 ++--- mindspore/ccsrc/pipeline/resource.cc | 2 +- .../static_analysis/abstract_value.cc | 2 +- .../pipeline/static_analysis/abstract_value.h | 4 +- .../pipeline/static_analysis/evaluator.cc | 2 +- .../static_analysis/param_validator.cc | 10 +-- .../ccsrc/pipeline/static_analysis/prim.cc | 46 ++++++------ .../static_analysis/program_specialize.cc | 10 +-- .../static_analysis/static_analysis.cc | 12 ++-- 22 files changed, 149 insertions(+), 142 deletions(-) diff --git a/mindspore/ccsrc/ir/func_graph.cc b/mindspore/ccsrc/ir/func_graph.cc index f5153676354..7404db4af0b 100644 --- a/mindspore/ccsrc/ir/func_graph.cc +++ b/mindspore/ccsrc/ir/func_graph.cc @@ -64,14 +64,14 @@ AbstractFunctionPtr FuncGraph::abstract() { for (auto& p : parameters_) { MS_EXCEPTION_IF_NULL(p); if (p->abstract() == nullptr) { - MS_LOG(ERROR) << "error!!"; + MS_LOG(ERROR) << "Error!!"; return nullptr; } args_spec_list.push_back(p->abstract()); } if (nullptr == output()) { - MS_LOG(ERROR) << "error func graph no output"; + MS_LOG(ERROR) << "Error func graph no output"; return nullptr; } @@ -543,6 +543,7 @@ void 
FuncGraph::GenerateKwargReplNode(const FuncGraphPtr& specialized_graph, TraceManager::EndTrace(); } } + bool FuncGraph::NeedGenerate(const std::vector& kwarg_list) { // if the function does not have any vararg/kwarg/kwonly/default value/kw args input // return the original graph @@ -556,6 +557,7 @@ bool FuncGraph::NeedGenerate(const std::vector& } return true; } + void FuncGraph::GenerateDefaultValue(const FuncGraphPtr& specialized_graph, const std::vector& specialized_parameter_list, std::unordered_map* repl_nodes) { @@ -664,7 +666,7 @@ void FuncGraph::EraseUnusedNodeInOrder() { auto mng = manager_.lock(); if (mng) { auto nodes = mng->nodes()[shared_from_base()]; - // Erase unusued cnode. + // Erase unused cnode. for (auto it = order_.begin(); it != order_.end();) { if (nodes.count(*it)) { (void)it++; @@ -695,7 +697,7 @@ void FuncGraph::CheckOrder() { if (found == it) { DumpCNodeList(); MS_LOG(EXCEPTION) << "The cnode " << (*it)->DebugString() << " order in " << ToString() - << " doesn't obey the input denpency, " + << " doesn't obey the input dependency, " << "as input " << input_node->DebugString() << " is not ahead of itself."; } } @@ -842,5 +844,5 @@ void FuncGraph::SetEffectDepends(const std::vector& depend_inputs) { } const PrimitivePtr FuncGraphTransform::func_graph_prim_ = std::make_shared("FuncGraph"); -const char kFuncGraphFlagUndetermin[] = "Undeterminate"; +const char kFuncGraphFlagUndetermined[] = "Undeterminate"; } // namespace mindspore diff --git a/mindspore/ccsrc/ir/func_graph.h b/mindspore/ccsrc/ir/func_graph.h index 95b26de4735..13e8c9dc8fc 100644 --- a/mindspore/ccsrc/ir/func_graph.h +++ b/mindspore/ccsrc/ir/func_graph.h @@ -96,7 +96,7 @@ class FuncGraphBase : public Value { MS_DECLARE_PARENT(FuncGraphBase, Value); }; -extern const char kFuncGraphFlagUndetermin[]; +extern const char kFuncGraphFlagUndetermined[]; class FuncGraph : public FuncGraphBase { public: @@ -200,7 +200,7 @@ class FuncGraph : public FuncGraphBase { // get all func graphs 
directly used by this func graph const FuncGraphCounterMap &func_graphs_used(); - // get all func graphs nestedly used by this func graph + // get all func graphs nested used by this func graph const FuncGraphSet &func_graphs_used_total(); // get all users of this func graph diff --git a/mindspore/ccsrc/pipeline/action.cc b/mindspore/ccsrc/pipeline/action.cc index 83e524e5dfc..baf4bea7ece 100644 --- a/mindspore/ccsrc/pipeline/action.cc +++ b/mindspore/ccsrc/pipeline/action.cc @@ -183,7 +183,7 @@ bool SymbolResolveAction(const ResourcePtr& res) { FuncGraphPtr func_graph = res->func_graph(); auto succ = parse::ResolveFuncGraph(func_graph, res); - // Remove usued nodes in cnode order list. + // Remove unused nodes in cnode order list. func_graph->EraseUnusedNodeInOrder(); func_graph->ReleaseFullOrderToEffectOrder(); for (auto fg : func_graph->func_graphs_used_total()) { diff --git a/mindspore/ccsrc/pipeline/base.h b/mindspore/ccsrc/pipeline/base.h index d007eac2944..30524e84f68 100644 --- a/mindspore/ccsrc/pipeline/base.h +++ b/mindspore/ccsrc/pipeline/base.h @@ -40,7 +40,7 @@ using ExecutorInfoPtr = std::shared_ptr; inline std::string GetPhasePrefix(const std::string& phase) { auto pos = phase.find('.'); if (pos == std::string::npos) { - MS_LOG(EXCEPTION) << "phase has no . for prefix" << phase; + MS_LOG(EXCEPTION) << "Phase has no . 
for prefix" << phase; } return phase.substr(0, pos); } diff --git a/mindspore/ccsrc/pipeline/parse/data_converter.cc b/mindspore/ccsrc/pipeline/parse/data_converter.cc index aee7c35ba6c..d25a202afc4 100644 --- a/mindspore/ccsrc/pipeline/parse/data_converter.cc +++ b/mindspore/ccsrc/pipeline/parse/data_converter.cc @@ -39,7 +39,7 @@ using TensorPtr = mindspore::tensor::TensorPtr; namespace { bool ConvertTuple(const py::object& obj, ValuePtr* const data, bool use_signature) { - MS_LOG(DEBUG) << "converting python tuple"; + MS_LOG(DEBUG) << "Converting python tuple"; py::tuple tuple = obj.cast(); std::vector value_list; for (size_t it = 0; it < tuple.size(); ++it) { @@ -56,7 +56,7 @@ bool ConvertTuple(const py::object& obj, ValuePtr* const data, bool use_signatur } bool ConvertList(const py::object& obj, ValuePtr* const data, bool use_signature) { - MS_LOG(DEBUG) << "converting python list"; + MS_LOG(DEBUG) << "Converting python list"; py::list list = obj.cast(); std::vector value_list; @@ -73,7 +73,7 @@ bool ConvertList(const py::object& obj, ValuePtr* const data, bool use_signature } bool ConvertCellList(const py::object& obj, ValuePtr* const data, bool use_signature) { - MS_LOG(DEBUG) << "converting cell list"; + MS_LOG(DEBUG) << "Converting cell list"; py::sequence list = obj; std::vector value_list; for (size_t it = 0; it < list.size(); ++it) { @@ -89,7 +89,7 @@ bool ConvertCellList(const py::object& obj, ValuePtr* const data, bool use_signa } bool ConvertDict(const py::object& obj, ValuePtr* data, bool use_signature) { - MS_LOG(DEBUG) << "converting python dict"; + MS_LOG(DEBUG) << "Converting python dict"; py::dict dict_values = obj.cast(); std::vector> key_values; @@ -110,14 +110,14 @@ bool ConvertDict(const py::object& obj, ValuePtr* data, bool use_signature) { } void ConvertNameSpace(const py::object& obj, ValuePtr* const data) { - MS_LOG(DEBUG) << "converting python module"; + MS_LOG(DEBUG) << "Converting python module"; py::module mod = 
python_adapter::GetPyModule(PYTHON_MOD_PARSE_MODULE); py::object module_namespace = python_adapter::CallPyModFn(mod, PYTHON_MOD_GET_MODULE_NAMESPACE, obj); *data = std::make_shared(RESOLVE_NAMESPACE_NAME_MODULE, py::cast(module_namespace)); } void ConvertDataClass(py::object obj, ValuePtr* const data) { - MS_LOG(DEBUG) << "converting dataclass"; + MS_LOG(DEBUG) << "Converting dataclass"; // Maybe the obj is dataclass define auto desc = py::cast(python_adapter::CallPyObjMethod(obj, PYTHON_GET_OBJ_DESC, obj)); // desc has format "", strip the '<' and '>' by offset 1; @@ -247,7 +247,7 @@ bool ConvertOtherObj(py::object obj, ValuePtr* const data) { bool ConvertData(const py::object& obj, ValuePtr* const data, bool use_signature) { // check parameter valid if (data == nullptr) { - MS_LOG(ERROR) << " data is null pointer"; + MS_LOG(ERROR) << "Data is null pointer"; return false; } @@ -386,9 +386,9 @@ py::object CreatePythonObject(const py::object& type, const py::tuple& params) { py::module mod = python_adapter::GetPyModule(PYTHON_MOD_PARSE_MODULE); py::object obj; if (params.size() == 0) { - obj = python_adapter::CallPyModFn(mod, PYTHON_MOD_CRETAE_OBJ_INSTANCE, type); + obj = python_adapter::CallPyModFn(mod, PYTHON_MOD_CREATE_OBJ_INSTANCE, type); } else { - obj = python_adapter::CallPyModFn(mod, PYTHON_MOD_CRETAE_OBJ_INSTANCE, type, params); + obj = python_adapter::CallPyModFn(mod, PYTHON_MOD_CREATE_OBJ_INSTANCE, type, params); } return obj; } @@ -421,6 +421,7 @@ ValuePtr PyDataToValue(const py::object& obj) { (void)ConvertData(to_convert, &value); return value; } + void ClearObjectCache() { object_map_.clear(); object_graphs_map_.clear(); @@ -445,7 +446,7 @@ ClassPtr ParseDataClass(const py::object& cls_obj) { for (auto& item : names) { TypePtr type_value = item.second.cast(); MS_EXCEPTION_IF_NULL(type_value); - MS_LOG(DEBUG) << "(name: " << py::cast(item.first) << ", type: " << type_value->ToString() << ")"; + MS_LOG(DEBUG) << "(Name: " << py::cast(item.first) << ", 
type: " << type_value->ToString() << ")"; attributes.push_back(std::make_pair(py::cast(item.first), type_value)); } diff --git a/mindspore/ccsrc/pipeline/parse/function_block.cc b/mindspore/ccsrc/pipeline/parse/function_block.cc index 25cc3ab4d8d..423e76c1d87 100644 --- a/mindspore/ccsrc/pipeline/parse/function_block.cc +++ b/mindspore/ccsrc/pipeline/parse/function_block.cc @@ -254,7 +254,7 @@ void FunctionBlock::Mature() { matured_ = true; } -// Force the conditon node to bool using bool operation +// Force the conditIon node to bool using bool operation CNodePtr FunctionBlock::ForceToBoolNode(const AnfNodePtr& cond) { TraceManager::DebugTrace(std::make_shared(cond->debug_info())); CNodePtr op_apply_node = func_graph()->NewCNode({MakeResolveOperation(NAMED_PRIMITIVE_BOOL), cond}); @@ -347,7 +347,7 @@ void FunctionBlock::InsertDependItemsBeforeReturn() { auto return_node = func_graph()->get_return(); if (return_node) { if (return_node->inputs().size() < 1) { - MS_LOG(EXCEPTION) << "length of inputs of output node is less than 2"; + MS_LOG(EXCEPTION) << "Length of inputs of output node is less than 2"; } old_ret = return_node->input(1); } else { diff --git a/mindspore/ccsrc/pipeline/parse/parse.cc b/mindspore/ccsrc/pipeline/parse/parse.cc index 60cc00a3070..231b98ab003 100644 --- a/mindspore/ccsrc/pipeline/parse/parse.cc +++ b/mindspore/ccsrc/pipeline/parse/parse.cc @@ -294,14 +294,14 @@ FunctionBlockPtr Parser::ParseStatement(const FunctionBlockPtr &block, const py: TraceManager::EndTrace(); return stmt_block; } else { - errcode_ = PARSE_NODE_METHOD_UNSUPPORT; + errcode_ = PARSE_NODE_METHOD_UNSUPPORTED; py::list location = ast_->CallParserObjMethod(PYTHON_PARSE_GET_LOCATION, node); if (location.size() < 2) { MS_LOG(EXCEPTION) << "List size should not be less than 2."; } auto filename = location[0].cast(); auto line_no = location[1].cast(); - MS_LOG(EXCEPTION) << "unsupported syntax '" << node_name << "' at " << filename << ":" << line_no; + MS_LOG(EXCEPTION) << 
"Unsupported syntax '" << node_name << "' at " << filename << ":" << line_no; } } @@ -324,11 +324,11 @@ AnfNodePtr Parser::ParseExprNode(const FunctionBlockPtr &block, const py::object TraceManager::EndTrace(); return expr_node; } else { - errcode_ = PARSE_NODE_METHOD_UNSUPPORT; + errcode_ = PARSE_NODE_METHOD_UNSUPPORTED; py::list ret = ast_->CallParserObjMethod(PYTHON_PARSE_GET_LOCATION, node); auto filename = ret[0].cast(); auto line_no = ret[1].cast(); - MS_LOG(EXCEPTION) << "unsupported syntax '" << node_name << "' at " << filename << ":" << line_no; + MS_LOG(EXCEPTION) << "Unsupported syntax '" << node_name << "' at " << filename << ":" << line_no; } } @@ -339,7 +339,7 @@ FunctionBlockPtr Parser::ParseExpr(const FunctionBlockPtr &block, const py::obje // Expr only have value , no target py::tuple expand_info = ast_->CallParserObjMethod(PYTHON_PARSE_EXPAND_EXPR_STATEMENT, node); - // refer pypthon function expand_expr_statement, expand_info is one of the following: + // refer python function expand_expr_statement, expand_info is one of the following: // True, expr.value, x // True, expr.value // False, None, None @@ -453,8 +453,8 @@ AnfNodePtr Parser::ParseNum(const FunctionBlockPtr &, const py::object &node) { return NewValueNode(data); } else { // no else actually - MS_LOG(ERROR) << "unsupported Num type : " << (std::string)py::str(obj) << GetLocation(node)->ToString(); - errcode_ = PARSE_NODE_TYPE_UNKONW; + MS_LOG(ERROR) << "Unsupported Num type : " << (std::string)py::str(obj) << GetLocation(node)->ToString(); + errcode_ = PARSE_NODE_TYPE_UNKOWN; return nullptr; } } @@ -478,8 +478,8 @@ AnfNodePtr Parser::ParseNameConstant(const FunctionBlockPtr &, const py::object return NewValueNode(kNone); } else { // no else actually - MS_LOG(ERROR) << "unsupported NameConstant type: " << (std::string)py::str(obj) << GetLocation(node)->ToString(); - errcode_ = PARSE_NODE_TYPE_UNKONW; + MS_LOG(ERROR) << "Unsupported NameConstant type: " << (std::string)py::str(obj) << 
GetLocation(node)->ToString(); + errcode_ = PARSE_NODE_TYPE_UNKOWN; return nullptr; } } @@ -497,7 +497,7 @@ AnfNodePtr Parser::ParseCall(const FunctionBlockPtr &block, const py::object &no // process function call py::object function_ast_node = python_adapter::GetPyObjAttr(node, "func"); AnfNodePtr call_function_anf_node = ParseExprNode(block, function_ast_node); - // function call arguments should be passed in as groups and upacked later using unpack call + // function call arguments should be passed in as groups and unpacked later using unpack call py::list args = python_adapter::GetPyObjAttr(node, "args"); std::vector packed_arguments; std::vector group_arguments; @@ -614,7 +614,7 @@ AnfNodePtr Parser::ParseAttribute(const FunctionBlockPtr &block, const py::objec py::object value_body = python_adapter::GetPyObjAttr(node, "value"); AnfNodePtr value_node = ParseExprNode(block, value_body); if (value_node == nullptr) { - MS_LOG(WARNING) << "Parse Attribut failed"; + MS_LOG(WARNING) << "Parse attribute failed"; return nullptr; } @@ -637,7 +637,7 @@ AnfNodePtr Parser::ParseCompare(const FunctionBlockPtr &block, const py::object // which there is two ops , but we only support one now py::list ops = python_adapter::GetPyObjAttr(node, "ops"); if (ops.size() > MAX_COMPARISON_OPS_SUPPORTED) { - MS_LOG(ERROR) << "mindspore does not support comparison with operators more than one now, ops size =" << ops.size(); + MS_LOG(ERROR) << "MindSpore does not support comparison with operators more than one now, ops size =" << ops.size(); return nullptr; } @@ -817,7 +817,7 @@ AnfNodePtr Parser::ParseIndex(const FunctionBlockPtr &block, const py::object &n // process a UnaryOp, +a, -b AnfNodePtr Parser::ParseUnaryOp(const FunctionBlockPtr &block, const py::object &node) { - MS_LOG(DEBUG) << "process ast UnaryOp"; + MS_LOG(DEBUG) << "Process ast UnaryOp"; py::object op = python_adapter::GetPyObjAttr(node, "op"); MS_EXCEPTION_IF_NULL(block); @@ -831,7 +831,7 @@ AnfNodePtr 
Parser::ParseUnaryOp(const FunctionBlockPtr &block, const py::object // process a dict ast node expression AnfNodePtr Parser::ParseDict(const FunctionBlockPtr &block, const py::object &node) { - MS_LOG(DEBUG) << "process ast Dict"; + MS_LOG(DEBUG) << "Process ast Dict"; py::list keys = node.attr("keys"); py::list values = node.attr("values"); std::vector key_nodes; @@ -849,7 +849,7 @@ AnfNodePtr Parser::ParseDict(const FunctionBlockPtr &block, const py::object &no // process a augment assign such as a += b; FunctionBlockPtr Parser::ParseAugAssign(const FunctionBlockPtr &block, const py::object &node) { - MS_LOG(DEBUG) << "process ast AugAssign"; + MS_LOG(DEBUG) << "Process ast AugAssign"; py::object op = python_adapter::GetPyObjAttr(node, "op"); MS_EXCEPTION_IF_NULL(block); @@ -864,10 +864,10 @@ FunctionBlockPtr Parser::ParseAugAssign(const FunctionBlockPtr &block, const py: } else if (ast_->IsClassMember(target_node)) { read_node = ParseAttribute(block, target_node); } else { - MS_LOG(EXCEPTION) << "not supported augassign"; + MS_LOG(EXCEPTION) << "Not supported augassign"; } if (read_node == nullptr) { - MS_LOG(EXCEPTION) << "can not get target node "; + MS_LOG(EXCEPTION) << "Can not get target node "; } py::object value = python_adapter::GetPyObjAttr(node, "value"); @@ -879,7 +879,7 @@ FunctionBlockPtr Parser::ParseAugAssign(const FunctionBlockPtr &block, const py: // process global declaration such as 'global x'; FunctionBlockPtr Parser::ParseGlobal(const FunctionBlockPtr &block, const py::object &node) { - MS_LOG(DEBUG) << "process ast Global"; + MS_LOG(DEBUG) << "Process ast Global"; MS_EXCEPTION_IF_NULL(block); py::list vars = python_adapter::GetPyObjAttr(node, "names"); for (auto &item : vars) { @@ -890,7 +890,7 @@ FunctionBlockPtr Parser::ParseGlobal(const FunctionBlockPtr &block, const py::ob // process a if statement FunctionBlockPtr Parser::ParseIf(const FunctionBlockPtr &block, const py::object &node) { - MS_LOG(DEBUG) << "process ast If"; + 
MS_LOG(DEBUG) << "Process ast If"; py::object test_node = python_adapter::GetPyObjAttr(node, "test"); AnfNodePtr condition_node = ParseExprNode(block, test_node); MS_EXCEPTION_IF_NULL(block); @@ -934,7 +934,7 @@ FunctionBlockPtr Parser::ParseIf(const FunctionBlockPtr &block, const py::object } FunctionBlockPtr Parser::ParseWhile(const FunctionBlockPtr &block, const py::object &node) { - MS_LOG(DEBUG) << "process ast While"; + MS_LOG(DEBUG) << "Process ast While"; MS_EXCEPTION_IF_NULL(block); MS_LOG(INFO) << "Parse while statement"; TraceManager::DebugTrace(std::make_shared(block->func_graph()->debug_info())); @@ -999,7 +999,7 @@ FunctionBlockPtr Parser::GenerateBlockInFor(const TraceInfoPtr &trace_info) { // x, it = next(it) // body FunctionBlockPtr Parser::ParseFor(const FunctionBlockPtr &block, const py::object &node) { - MS_LOG(DEBUG) << "process ast For"; + MS_LOG(DEBUG) << "Process ast For"; MS_EXCEPTION_IF_NULL(block); AnfNodePtr op_iter = block->MakeResolveOperation(NAMED_PRIMITIVE_ITER); AnfNodePtr op_next = block->MakeResolveOperation(NAMED_PRIMITIVE_NEXT); @@ -1054,7 +1054,7 @@ FunctionBlockPtr Parser::ParseFor(const FunctionBlockPtr &block, const py::objec return after_block; } AnfNodePtr Parser::ParseIfExp(const FunctionBlockPtr &block, const py::object &node) { - MS_LOG(DEBUG) << "process ast IfExp"; + MS_LOG(DEBUG) << "Process ast IfExp"; MS_EXCEPTION_IF_NULL(block); py::object test_node = python_adapter::GetPyObjAttr(node, "test"); AnfNodePtr condition_node = ParseExprNode(block, test_node); @@ -1163,7 +1163,7 @@ void Parser::HandleAssignSubscript(const FunctionBlockPtr &block, const py::obje void Parser::WriteAssignVars(const FunctionBlockPtr &block, const py::object &targ, const AnfNodePtr &value_node) { MS_EXCEPTION_IF_NULL(value_node); - MS_LOG(DEBUG) << "process WriteAssignVars"; + MS_LOG(DEBUG) << "Process WriteAssignVars"; auto ast_type = AstSubType(py::cast(ast_->CallParserObjMethod(PYTHON_PARSE_GET_AST_TYPE, targ))); if (ast_type == 
AST_SUB_TYPE_NAME) { HandleAssignName(block, targ, value_node); @@ -1174,7 +1174,7 @@ void Parser::WriteAssignVars(const FunctionBlockPtr &block, const py::object &ta } else if (ast_->IsClassMember(targ)) { HandleAssignClassMember(block, targ, value_node); } else { - MS_LOG(EXCEPTION) << "not supported assign type: " << ast_type + MS_LOG(EXCEPTION) << "Not supported assign type: " << ast_type << " NodeInfo: " << trace::GetDebugInfo(value_node->debug_info()); } } @@ -1340,7 +1340,7 @@ bool ParseAst::UpdateFuncGraphFlags(const FuncGraphPtr &func_graph) { py::dict flags = python_adapter::GetPyObjAttr(obj_, PYTHON_EXTERN_MINDSPORE_FLAG); for (auto &item : flags) { if (!py::isinstance(item.first) || !py::isinstance(item.second)) { - MS_LOG(ERROR) << "type error in flags dict convert"; + MS_LOG(ERROR) << "Type error in flags dict convert"; return false; } auto name = py::cast(item.first); diff --git a/mindspore/ccsrc/pipeline/parse/parse.h b/mindspore/ccsrc/pipeline/parse/parse.h index 3e891e47dd4..4dd1bc62aaa 100644 --- a/mindspore/ccsrc/pipeline/parse/parse.h +++ b/mindspore/ccsrc/pipeline/parse/parse.h @@ -40,8 +40,8 @@ enum ParseStatusCode : int { PARSE_PARAMETER_INVALID, // parameter is invalid PARSE_NO_RETURN, // function no return node PARSE_NODE_TYPE_NO_MATCH, // ast node type is error - PARSE_NODE_TYPE_UNKONW, // node type is unkonw - PARSE_NODE_METHOD_UNSUPPORT, // no method to parse the node + PARSE_NODE_TYPE_UNKOWN, // node type is unkown + PARSE_NODE_METHOD_UNSUPPORTED, // no method to parse the node PARSE_DONT_RESOLVE_SYMBOL, // can't resolve the string PARSE_NOT_SUPPORTED_COMPARE_EXPR, // the comparison is not supported PARSE_FAILURE = 0xFF @@ -102,7 +102,7 @@ class Parser { AnfNodePtr ParseCall(const FunctionBlockPtr &block, const py::object &node); // process the if expression AnfNodePtr ParseIfExp(const FunctionBlockPtr &block, const py::object &node); - // process calss type define + // process class type define AnfNodePtr ParseAttribute(const 
FunctionBlockPtr &block, const py::object &node); // process a compare expression AnfNodePtr ParseCompare(const FunctionBlockPtr &block, const py::object &node); diff --git a/mindspore/ccsrc/pipeline/parse/parse_base.h b/mindspore/ccsrc/pipeline/parse/parse_base.h index 9f92687b6fe..df2d1968a51 100644 --- a/mindspore/ccsrc/pipeline/parse/parse_base.h +++ b/mindspore/ccsrc/pipeline/parse/parse_base.h @@ -45,7 +45,7 @@ enum AstSubType : int { // define the parse target type enum ParseTargetTypeDef { - PARSE_TARGET_FUNCTION = 0, // funciton + PARSE_TARGET_FUNCTION = 0, // function PARSE_TARGET_METHOD = 1, // method PARSE_TARGET_OBJECT_INSTANCE = 2, // object instance PARSE_TARGET_UNKNOW = 0xFF // ERROR TYPE @@ -59,7 +59,7 @@ const char PYTHON_MOD_RESOLVE_GET_OBJ_KEY[] = "get_object_key"; const char PYTHON_MOD_PARSE_CHECK_IS_CLASS_MEMBER[] = "is_class_member"; const char PYTHON_MOD_RESOLVE_GET_OBJ_TYPE[] = "get_obj_type"; const char PYTHON_MOD_GET_CLASS_INSTANCE_TYPE[] = "get_class_instance_type"; -const char PYTHON_MOD_CRETAE_OBJ_INSTANCE[] = "create_obj_instance"; +const char PYTHON_MOD_CREATE_OBJ_INSTANCE[] = "create_obj_instance"; const char PYTHON_MOD_GET_DATACLASS_ATTRS[] = "get_dataclass_attributes"; const char PYTHON_MOD_GET_DATACLASS_METHODS[] = "get_dataclass_methods"; const char PYTHON_MOD_GET_MODULE_NAMESPACE[] = "get_module_namespace"; diff --git a/mindspore/ccsrc/pipeline/parse/python_adapter.cc b/mindspore/ccsrc/pipeline/parse/python_adapter.cc index db402387295..e2c86164d4a 100644 --- a/mindspore/ccsrc/pipeline/parse/python_adapter.cc +++ b/mindspore/ccsrc/pipeline/parse/python_adapter.cc @@ -50,6 +50,7 @@ void SetPythonPath(const std::string& path) { (void)sys_path.attr("append")(path.c_str()); } } + std::shared_ptr set_python_scoped() { // if start process from python, no need set the python scope. 
if (!python_env_) { @@ -79,6 +80,7 @@ py::object GetPyObjAttr(const py::object& obj, const std::string& attr) { } return py::none(); } + py::object GetPyFn(const std::string& module, const std::string& name) { (void)python_adapter::set_python_scoped(); if (!module.empty() && !name.empty()) { diff --git a/mindspore/ccsrc/pipeline/parse/resolve.cc b/mindspore/ccsrc/pipeline/parse/resolve.cc index 976c474aa45..ebc1f65486c 100644 --- a/mindspore/ccsrc/pipeline/parse/resolve.cc +++ b/mindspore/ccsrc/pipeline/parse/resolve.cc @@ -53,6 +53,7 @@ abstract::AbstractBasePtr ClassType::ToAbstract() { ret_val->set_value_desc(ToString()); return ret_val; } + // call python PYTHON_MOD_RESOLVE_FUNCTION interface to resolve the symbol in corresponding namespace bool SymbolResolver::Resolve() { py::module mod = python_adapter::GetPyModule(PYTHON_MOD_PARSE_MODULE); @@ -127,7 +128,7 @@ bool ResolveObjectToNode(const FuncGraphPtr& func_graph, const py::object& obj, MS_LOG(ERROR) << "Resolve parameter object failed, got nullptr"; return false; } - MS_LOG(DEBUG) << "add param graph:" << func_graph->ToString() << ", " << param->DebugString(); + MS_LOG(DEBUG) << "Add param graph:" << func_graph->ToString() << ", " << param->DebugString(); output = param; } else if (py::hasattr(obj, "__parameter_tuple__")) { @@ -160,6 +161,7 @@ bool ResolveObjectToNode(const FuncGraphPtr& func_graph, const py::object& obj, *node = output; return true; } + // transform the ValueTuple or ValueList of graph node to make tuple of const graph node bool TransformVectorGraphValueNode(const FuncGraphManagerPtr& manager, const AnfNodePtr& node, const ValueNodePtr& value_node, AnfNodePtr* const transformed) { @@ -175,7 +177,7 @@ bool TransformVectorGraphValueNode(const FuncGraphManagerPtr& manager, const Anf continue; } if (has_graph_in_list) { - MS_LOG(EXCEPTION) << "list has graph in it , but not all is graph"; + MS_LOG(EXCEPTION) << "List has graph in it, but not all is graph"; } } // The celllist or ordered_cell 
will be parsed as valuetuple of const graph in it, diff --git a/mindspore/ccsrc/pipeline/pass.cc b/mindspore/ccsrc/pipeline/pass.cc index e2626d53145..d89a0090a79 100644 --- a/mindspore/ccsrc/pipeline/pass.cc +++ b/mindspore/ccsrc/pipeline/pass.cc @@ -195,13 +195,13 @@ void ReclaimOptimizer() { bool OptPassGroup(const ResourcePtr& res, const std::string& name) { if (res->func_graph() == nullptr) { - MS_LOG(ERROR) << "opt passes int error"; + MS_LOG(ERROR) << "Opt passes int error"; return false; } abstract::AbstractBasePtrList args = res->args_spec(); FuncGraphPtr func_graph = res->func_graph(); - MS_LOG(DEBUG) << "start " << name << " func graph:" << func_graph->ToString() << ", " + MS_LOG(DEBUG) << "Start " << name << " func graph:" << func_graph->ToString() << ", " << func_graph->get_return()->DebugString(true); InitOpt(res); if (g_pass_opts.find(name) != g_pass_opts.end()) { diff --git a/mindspore/ccsrc/pipeline/pipeline.cc b/mindspore/ccsrc/pipeline/pipeline.cc index 861862b849a..0d7790fb360 100644 --- a/mindspore/ccsrc/pipeline/pipeline.cc +++ b/mindspore/ccsrc/pipeline/pipeline.cc @@ -98,7 +98,7 @@ py::tuple GenerateKey(const std::string& name, const std::unordered_mapfunc_graph; } std::size_t ExecutorPy::ArgListSize(const std::string& phase) { if (info_.count(phase) == 0) { - MS_LOG(EXCEPTION) << "no phase in executor:" << GetPhasePrefix(phase); + MS_LOG(EXCEPTION) << "No phase in executor:" << GetPhasePrefix(phase); } return info_[phase]->arg_list_size; } @@ -243,7 +243,7 @@ void ExecutorPy::DelNetRes(const std::string& id) { auto tmp_info = info_; for (auto& item : tmp_info) { if (item.first.find(id) != string::npos) { - MS_LOG(INFO) << "delete network res:" << item.first; + MS_LOG(INFO) << "Delete network res:" << item.first; (void)info_.erase(item.first); flag = true; } @@ -262,7 +262,7 @@ void ExecutorPy::DelNetRes(const std::string& id) { } void ExecutorPy::ClearRes() { - MS_LOG(INFO) << "clean executor Resrouce!"; + MS_LOG(INFO) << "Clean executor 
resource!"; executor_ = nullptr; } @@ -278,27 +278,27 @@ void ExecutorPy::SaveCompiledGraph(const std::string& phase_s) { MS_EXCEPTION_IF_NULL(parallel::ParallelContext::GetInstance()); std::string parallel_mode = parallel::ParallelContext::GetInstance()->parallel_mode(); - MS_LOG(INFO) << "save compiled func graph(" << func_graph->ToString() << ") phase(" << phase_s << ")!"; + MS_LOG(INFO) << "Save compiled func graph(" << func_graph->ToString() << ") phase(" << phase_s << ")!"; info_[phase_s]->func_graph = func_graph; if ((func_graph != nullptr) && ((parallel_mode == parallel::AUTO_PARALLEL) || (parallel_mode == parallel::SEMI_AUTO_PARALLEL))) { - MS_LOG(DEBUG) << "save model parallel parameter layout graph!"; + MS_LOG(DEBUG) << "Save model parallel parameter layout graph!"; func_graph = info_[phase_s]->resource->results()[kStepParallelGraph].cast(); - ExecutorInfoPtr excutor_info = std::make_shared(); + ExecutorInfoPtr executor_info = std::make_shared(); std::string layout_graph = phase_s + kStepParallelGraph; - excutor_info->func_graph = func_graph; - info_[layout_graph] = excutor_info; + executor_info->func_graph = func_graph; + info_[layout_graph] = executor_info; } else { - MS_LOG(DEBUG) << "save model parallel parameter layout graph null!"; + MS_LOG(DEBUG) << "Save model parallel parameter layout graph null!"; } - MS_LOG(INFO) << "end save compiled func graph!"; + MS_LOG(INFO) << "End save compiled func graph!"; } bool ExecutorPy::ChangeExportGeirUseVmFlag(bool use_vm, const std::string& phase_s) const { std::string phase_prefix = GetPhasePrefix(phase_s); if (use_vm && phase_prefix == "export") { - MS_LOG(INFO) << "use ge backend to export geir"; + MS_LOG(INFO) << "Use ge backend to export geir"; use_vm = false; } return use_vm; @@ -316,7 +316,7 @@ void ExecutorPy::GetGeBackendPolicy() const { bool ExecutorPy::CompileInner(const py::object& obj, const py::tuple& args, const py::object& phase, bool use_vm) { MS_LOG(DEBUG) << "Start ExecutorPy compile!"; if 
((!py::isinstance(phase))) { - MS_LOG(ERROR) << "arg phase must be string."; + MS_LOG(ERROR) << "Arg phase must be string."; return false; } // check the arg valid? @@ -327,7 +327,7 @@ bool ExecutorPy::CompileInner(const py::object& obj, const py::tuple& args, cons #ifdef ENABLE_GE GetGeBackendPolicy(); #endif - ExecutorInfoPtr excutor_info = std::make_shared(); + ExecutorInfoPtr executor_info = std::make_shared(); std::string phase_s = py::cast(phase); MS_LOG(INFO) << "ExecutorPy compile phase:" << phase_s << "!"; ResourcePtr resource = std::make_shared(obj); @@ -353,16 +353,16 @@ bool ExecutorPy::CompileInner(const py::object& obj, const py::tuple& args, cons ValuePtr converted = nullptr; bool succ = parse::ConvertData(args[i], &converted); if (!succ) { - MS_LOG(EXCEPTION) << "args convert error"; + MS_LOG(EXCEPTION) << "Args convert error"; } bool broaden = true; args_spec.push_back(abstract::FromValue(converted, broaden)); } resource->set_args_spec(args_spec); - excutor_info->arg_list_size = size; - excutor_info->resource = resource; - info_[phase_s] = excutor_info; + executor_info->arg_list_size = size; + executor_info->resource = resource; + info_[phase_s] = executor_info; pip->Run(); // save the run graph func to MsPipeLine @@ -439,7 +439,7 @@ std::string GetMsIrFile(void) { char real_path[PATH_MAX] = {0}; if (realpath(path, real_path) == nullptr) { - MS_LOG(ERROR) << "MS IR Path error, " << path; + MS_LOG(ERROR) << "MS IR path error, " << path; return file; } file = real_path; @@ -485,7 +485,7 @@ void RunPipelineAction(const ActionItem& action, pipeline::ResourcePtr resource, #endif void Pipeline::Run() { - MS_LOG(INFO) << "pipeline run"; + MS_LOG(INFO) << "Pipeline run"; MS_EXCEPTION_IF_NULL(resource_); FuncGraphPtr user_graph = nullptr; @@ -507,7 +507,7 @@ void Pipeline::Run() { MS_LOG(DEBUG) << "Action " << action.first << " end."; }; if (!result) { - MS_LOG(EXCEPTION) << "pipeline running to end, failed in step:" << action.first; + MS_LOG(EXCEPTION) << 
"Pipeline running to end, failed in step:" << action.first; } if (MsContext::GetInstance()->save_graphs_flag() && resource_->func_graph() != nullptr) { auto graph = resource_->func_graph(); @@ -555,7 +555,7 @@ void Pipeline::Run() { if (MsContext::GetInstance()->save_graphs_flag() && (user_graph != nullptr)) { std::string user_graph_file = GetFilePathName("ModelDigraph.dot"); - MS_LOG(DEBUG) << "save user graph to: " << user_graph_file; + MS_LOG(DEBUG) << "Save user graph to: " << user_graph_file; draw::DrawUserFuncGraph(user_graph_file, user_graph); #ifdef ENABLE_DUMP_IR @@ -572,7 +572,7 @@ void Pipeline::Run() { ChangeFileMode(filename, S_IRUSR); #endif } - MS_LOG(INFO) << "end"; + MS_LOG(INFO) << "End"; } void ExecutorPy::ProcessVmArg(const py::tuple& args, const std::string& phase, VectorRef* arg_list) { @@ -582,7 +582,7 @@ void ExecutorPy::ProcessVmArg(const py::tuple& args, const std::string& phase, V py::object arg = args[i]; auto ms_context = MsContext::GetInstance(); if (ms_context->backend_policy() == kMsConvert && py::isinstance(arg)) { - MS_LOG(EXCEPTION) << "args[" << i << "] is numpy array, not tensor"; + MS_LOG(EXCEPTION) << "Args[" << i << "] is numpy array, not tensor"; } (*arg_list).push_back(arg); } @@ -642,9 +642,9 @@ py::object ExecutorPy::Run(const py::tuple& args, const py::object& phase) { MS_LOG(EXCEPTION) << "Can't find run graph func for " << phase_s; } - MS_LOG(DEBUG) << "eval run" << backend; + MS_LOG(DEBUG) << "Eval run" << backend; BaseRef value = (*run)(arg_list); - MS_LOG(DEBUG) << "run end"; + MS_LOG(DEBUG) << "Run end"; return BaseRefToPyData(value); } @@ -704,9 +704,9 @@ bool InitExecDatasetVm(const std::string& queue_name, int64_t size, int64_t batc p_init->set_attr("shapes", MakeValue(int_shapes)); p_init->set_attr("input_indexes", MakeValue(int_input_indexes)); - const std::vector emply_str_list; - p_init->set_attr("input_names", MakeValue(emply_str_list)); - p_init->set_attr("output_names", MakeValue(emply_str_list)); + const 
std::vector empty_str_list; + p_init->set_attr("input_names", MakeValue(empty_str_list)); + p_init->set_attr("output_names", MakeValue(empty_str_list)); FuncGraphPtr func_graph = std::make_shared(); auto app_init = std::make_shared(AnfNodePtrList{NewValueNode(p_init)}, func_graph); @@ -730,7 +730,7 @@ bool InitExecDatasetVm(const std::string& queue_name, int64_t size, int64_t batc if (!(*runner.run)) { // empty function - MS_LOG(EXCEPTION) << "Backend " << backend->name() << " unsupports tdt dataset."; + MS_LOG(EXCEPTION) << "Backend " << backend->name() << " unsupported tdt dataset."; } // launch init dataset runner without inputs and outputs @@ -758,7 +758,7 @@ void InitHccl() { auto runtime_instance = device::KernelRuntimeManager::Instance().GetKernelRuntime(device_name, device_id); MS_EXCEPTION_IF_NULL(runtime_instance); if (!runtime_instance->Init()) { - MS_LOG(ERROR) << "kernel runtime init error."; + MS_LOG(ERROR) << "Kernel runtime init error."; return; } } @@ -795,7 +795,7 @@ void InitGe() { auto ms_context = MsContext::GetInstance(); MS_EXCEPTION_IF_NULL(ms_context); if (!ms_context->OpenTsd()) { - MS_LOG(EXCEPTION) << "open tsd failed"; + MS_LOG(EXCEPTION) << "Open tsd failed"; } (void)ms_context->InitGe(); } diff --git a/mindspore/ccsrc/pipeline/pipeline_ge.cc b/mindspore/ccsrc/pipeline/pipeline_ge.cc index 5a2a8039dd9..abf55bb2d8c 100644 --- a/mindspore/ccsrc/pipeline/pipeline_ge.cc +++ b/mindspore/ccsrc/pipeline/pipeline_ge.cc @@ -210,7 +210,7 @@ bool AddDFGraph(const std::map& info, const py::di (void)convertor.GenerateCheckpointGraph(); if (convertor.ErrCode() != 0) { DfGraphManager::GetInstance().ClearGraph(); - MS_LOG(ERROR) << "convert df graph failed, err:" << convertor.ErrCode(); + MS_LOG(ERROR) << "Convert df graph failed, err:" << convertor.ErrCode(); return false; } @@ -238,7 +238,7 @@ bool AddDFGraph(const std::map& info, const py::di FuncGraphPtr BuildDFGraph(const std::map& info, const py::dict& init_params, const std::string& phase, 
const py::object& broadcast_params) { if (info.count(phase) == 0) { - MS_LOG(EXCEPTION) << "no phase in executor:" << GetPhasePrefix(phase); + MS_LOG(EXCEPTION) << "No phase in executor:" << GetPhasePrefix(phase); } FuncGraphPtr anf_graph = info.at(phase)->func_graph; @@ -389,7 +389,7 @@ std::shared_ptr DoExecGraph(const FuncGraphPtr& graph, const std::ve const std::string& phase) { std::vector ge_tensors = TransformUtil::ConvertInputTensors(inputs, kOpFormat_NCHW); if (ge_tensors.size() != inputs.size()) { - MS_LOG(ERROR) << "args convert to ge tensor error"; + MS_LOG(ERROR) << "Args convert to ge tensor error"; return nullptr; } @@ -444,7 +444,7 @@ void ProcessGeArg(const std::map& info, const py:: std::size_t size = args.size(); if (info.count(phase) == 0) { - MS_LOG(EXCEPTION) << "no phase in executor:" << GetPhasePrefix(phase); + MS_LOG(EXCEPTION) << "No phase in executor:" << GetPhasePrefix(phase); } auto arg_size = info.at(phase)->arg_list_size; @@ -459,12 +459,12 @@ void ProcessGeArg(const std::map& info, const py:: ValuePtr converted = nullptr; bool succ = parse::ConvertData(args[i], &converted); if (!succ) { - MS_LOG(EXCEPTION) << "args convert error"; + MS_LOG(EXCEPTION) << "Args convert error"; } if (converted->isa()) { (*inputs).push_back(converted->cast()); } else { - MS_LOG(EXCEPTION) << "args, " << converted->ToString() << " is not tensor"; + MS_LOG(EXCEPTION) << "Args " << converted->ToString() << " is not tensor"; } } } @@ -481,7 +481,7 @@ py::object ExecDFGraph(const std::map& info, const } if (info.count(phase) == 0) { - MS_LOG(EXCEPTION) << "has no phase:" << phase; + MS_LOG(EXCEPTION) << "There is no phase:" << phase; } FuncGraphPtr anf_graph = info.at(phase)->func_graph; @@ -511,7 +511,7 @@ py::object ExecDFGraph(const std::map& info, const if (ret != nullptr) { return *ret; } else { - MS_LOG(EXCEPTION) << "exec graph failed"; + MS_LOG(EXCEPTION) << "Exec graph failed"; } } void ExportDFGraph(const std::string& file_name, const std::string& 
phase) { diff --git a/mindspore/ccsrc/pipeline/resource.cc b/mindspore/ccsrc/pipeline/resource.cc index 59ee04ad156..18695518bef 100644 --- a/mindspore/ccsrc/pipeline/resource.cc +++ b/mindspore/ccsrc/pipeline/resource.cc @@ -189,7 +189,7 @@ Resource::~Resource() { // If exit normally, these global variables will be cleaned // in Resource::Clean call by MsPipeline::Compile, but if exit with MS_LOGEXCEPTION, // these global variables may not being cleaned, it may - // cause segmentfault when free python object inside these global varaibles + // cause segmentfault when free python object inside these global variables // after python interpreter got freed, so these global variables // are cleaned here. // So if exit normally, these global variable will be cleaned twice, diff --git a/mindspore/ccsrc/pipeline/static_analysis/abstract_value.cc b/mindspore/ccsrc/pipeline/static_analysis/abstract_value.cc index eef4e8b4adc..555a6d87c0a 100644 --- a/mindspore/ccsrc/pipeline/static_analysis/abstract_value.cc +++ b/mindspore/ccsrc/pipeline/static_analysis/abstract_value.cc @@ -443,7 +443,7 @@ bool AbstractTensor::operator==(const AbstractTensor &other) const { auto v1 = GetValueTrack(); auto v2 = other.GetValueTrack(); if (v1 == nullptr || v2 == nullptr) { - MS_LOG(EXCEPTION) << "the value of AbstractTensor is nullptr"; + MS_LOG(EXCEPTION) << "The value of AbstractTensor is nullptr"; } bool is_value_equal = (v1 == v2); diff --git a/mindspore/ccsrc/pipeline/static_analysis/abstract_value.h b/mindspore/ccsrc/pipeline/static_analysis/abstract_value.h index 9d9585bba3c..9e0dd82003a 100644 --- a/mindspore/ccsrc/pipeline/static_analysis/abstract_value.h +++ b/mindspore/ccsrc/pipeline/static_analysis/abstract_value.h @@ -77,7 +77,7 @@ class AbstractBase : public Base { } protected: - // default implementation, it can be overrided by subclass; + // default implementation, it can be overwritten by subclass; virtual ValuePtr RealBuildValue() const { return kAnyValue; } private: @@ 
-495,7 +495,7 @@ class AbstractNone : public AbstractBase { }; using AbstractNonePtr = std::shared_ptr; -// the un assgined state value for variable, which means the variable is not assigned +// the unassigned state value for variable, which means the variable is not assigned class AbstractNull : public AbstractBase { public: AbstractNull() : AbstractBase(kNullObj) { set_type(std::make_shared()); } diff --git a/mindspore/ccsrc/pipeline/static_analysis/evaluator.cc b/mindspore/ccsrc/pipeline/static_analysis/evaluator.cc index 251f218145d..9b120f731cc 100644 --- a/mindspore/ccsrc/pipeline/static_analysis/evaluator.cc +++ b/mindspore/ccsrc/pipeline/static_analysis/evaluator.cc @@ -116,7 +116,7 @@ AbstractBasePtrList FuncGraphEvaluator::NormalizeArgs(const AbstractBasePtrList return broaded_list; } - if (func_graph_->has_flag(kFuncGraphFlagUndetermin)) { + if (func_graph_->has_flag(kFuncGraphFlagUndetermined)) { if (parent_context_) { MS_LOG(DEBUG) << "Undeterminate FuncGraphEvaluator " << ToString() << ", context: " << parent_context_->ToString(); diff --git a/mindspore/ccsrc/pipeline/static_analysis/param_validator.cc b/mindspore/ccsrc/pipeline/static_analysis/param_validator.cc index 1b70e2fe22b..69f6af0dc0a 100644 --- a/mindspore/ccsrc/pipeline/static_analysis/param_validator.cc +++ b/mindspore/ccsrc/pipeline/static_analysis/param_validator.cc @@ -56,7 +56,7 @@ TypePtr CheckTensorDType(const AbstractTensorPtr &tensor, const TypePtrList &acc } TypePtr ele_type = tensor->element()->BuildType(); if (ele_type == nullptr) { - MS_LOG(EXCEPTION) << "abstract tensor element type nullptr"; + MS_LOG(EXCEPTION) << "Abstract tensor element type nullptr"; } return CheckType(ele_type, accepts, error_message_prefix); } @@ -64,7 +64,7 @@ TypePtr CheckTensorDType(const AbstractTensorPtr &tensor, const TypePtrList &acc TypePtr CheckTensorsDTypeSame(const AbstractTensorPtrList &tensor_list, const TypePtrList &accepts, const std::string &error_message_prefix) { if
(tensor_list.empty()) { - MS_LOG(EXCEPTION) << "array list is empty"; + MS_LOG(EXCEPTION) << "Array list is empty"; } auto sample_tensor = tensor_list[0]; @@ -78,7 +78,7 @@ TypePtr CheckTensorsDTypeSame(const AbstractTensorPtrList &tensor_list, const Ty auto aType = tensor_list[index]->element()->BuildType(); loginfoBuffer << " " << aType->ToString(); if (sample_type->type_id() != aType->type_id()) { - MS_LOG(EXCEPTION) << "expected type " << sample_type->ToString() << ", but got " << aType->ToString() + MS_LOG(EXCEPTION) << "Expected type " << sample_type->ToString() << ", but got " << aType->ToString() << ", index " << index; } } @@ -89,11 +89,11 @@ TypePtr CheckTensorsDTypeSame(const AbstractTensorPtrList &tensor_list, const Ty TypePtr CheckScalarType(const AbstractScalarPtr &scalar, const TypePtrList &accepts, const std::string &error_message_prefix) { if (scalar == nullptr) { - MS_LOG(EXCEPTION) << "scalar nullptr"; + MS_LOG(EXCEPTION) << "Scalar nullptr"; } auto type = scalar->BuildType(); if (type == nullptr) { - MS_LOG(EXCEPTION) << "scalar value nullptr"; + MS_LOG(EXCEPTION) << "Scalar value nullptr"; } return CheckType(type, accepts, error_message_prefix); diff --git a/mindspore/ccsrc/pipeline/static_analysis/prim.cc b/mindspore/ccsrc/pipeline/static_analysis/prim.cc index 4110f258110..56bcd77f671 100644 --- a/mindspore/ccsrc/pipeline/static_analysis/prim.cc +++ b/mindspore/ccsrc/pipeline/static_analysis/prim.cc @@ -182,30 +182,30 @@ AbstractBasePtr DoSignatureEvaluator::Run(AnalysisEnginePtr engine, const Config static AbstractBasePtrList GetUnpackGraphSpecArgsList(AbstractBasePtrList args_spec_list, bool need_unpack) { // arg[0] is the func graph to unpack, ignore it - AbstractBasePtrList sepcialize_args_before_unpack(args_spec_list.begin() + 1, args_spec_list.end()); - AbstractBasePtrList graph_sepcialize_args; + AbstractBasePtrList specialize_args_before_unpack(args_spec_list.begin() + 1, args_spec_list.end()); + AbstractBasePtrList 
graph_specialize_args; if (need_unpack) { - for (size_t index = 0; index < sepcialize_args_before_unpack.size(); index++) { - MS_EXCEPTION_IF_NULL(sepcialize_args_before_unpack[index]); - if (sepcialize_args_before_unpack[index]->isa()) { - AbstractTuplePtr arg_tuple = sepcialize_args_before_unpack[index]->cast(); + for (size_t index = 0; index < specialize_args_before_unpack.size(); index++) { + MS_EXCEPTION_IF_NULL(specialize_args_before_unpack[index]); + if (specialize_args_before_unpack[index]->isa()) { + AbstractTuplePtr arg_tuple = specialize_args_before_unpack[index]->cast(); std::transform(arg_tuple->elements().begin(), arg_tuple->elements().end(), - std::back_inserter(graph_sepcialize_args), [](AbstractBasePtr abs) { return abs; }); - } else if (sepcialize_args_before_unpack[index]->isa()) { - AbstractDictionaryPtr arg_dict = sepcialize_args_before_unpack[index]->cast(); + std::back_inserter(graph_specialize_args), [](AbstractBasePtr abs) { return abs; }); + } else if (specialize_args_before_unpack[index]->isa()) { + AbstractDictionaryPtr arg_dict = specialize_args_before_unpack[index]->cast(); auto dict_elems = arg_dict->elements(); (void)std::transform( - dict_elems.begin(), dict_elems.end(), std::back_inserter(graph_sepcialize_args), + dict_elems.begin(), dict_elems.end(), std::back_inserter(graph_specialize_args), [](const AbstractAttribute &item) { return std::make_shared(item.first, item.second); }); } else { MS_LOG(EXCEPTION) << "UnpackGraph require args should be tuple or dict, but got " - << sepcialize_args_before_unpack[index]->ToString(); + << specialize_args_before_unpack[index]->ToString(); } } } else { - graph_sepcialize_args = sepcialize_args_before_unpack; + graph_specialize_args = specialize_args_before_unpack; } - return graph_sepcialize_args; + return graph_specialize_args; } AbstractBasePtr UnpackGraphEvaluator::Run(AnalysisEnginePtr engine, const ConfigPtrList &args_conf_list, @@ -239,14 +239,14 @@ AbstractBasePtr 
UnpackGraphEvaluator::Run(AnalysisEnginePtr engine, const Config MS_EXCEPTION_IF_NULL(real_fn); FuncGraphPtr forward_graph = real_fn->func_graph(); MS_EXCEPTION_IF_NULL(forward_graph); - AbstractBasePtrList graph_sepcialize_args = + AbstractBasePtrList graph_specialize_args = GetUnpackGraphSpecArgsList(args_spec_list, unpack_graph->need_unpack_args()); - AbstractBasePtrList graph_sepcialize_args_without_sens; - (void)std::transform(graph_sepcialize_args.begin(), - graph_sepcialize_args.end() - (unpack_graph->with_sens_in_args() ? 1 : 0), - std::back_inserter(graph_sepcialize_args_without_sens), [](AbstractBasePtr abs) { return abs; }); - auto new_graph = forward_graph->GenerateGraph(graph_sepcialize_args_without_sens); + AbstractBasePtrList graph_specialize_args_without_sens; + (void)std::transform(graph_specialize_args.begin(), + graph_specialize_args.end() - (unpack_graph->with_sens_in_args() ? 1 : 0), + std::back_inserter(graph_specialize_args_without_sens), [](AbstractBasePtr abs) { return abs; }); + auto new_graph = forward_graph->GenerateGraph(graph_specialize_args_without_sens); engine->func_graph_manager()->AddFuncGraph(new_graph); ScopePtr scope = kDefaultScope; if (out_conf != nullptr) { @@ -635,8 +635,8 @@ AbstractBasePtr GetEvaluatedValueForClassAttrOrMethod(const AnalysisEnginePtr &e MS_LOG(EXCEPTION) << "Attribute type error"; } std::string item_name = item_v->cast()->value(); - MS_LOG(DEBUG) << "Resovle name: " << cls->tag().name(); - MS_LOG(DEBUG) << "Resovle item: " << item_name; + MS_LOG(DEBUG) << "Resolve name: " << cls->tag().name(); + MS_LOG(DEBUG) << "Resolve item: " << item_name; AbstractBasePtr attr = cls->GetAttribute(item_name); if (attr != nullptr) { @@ -720,7 +720,7 @@ class EmbedEvaluator : public SymbolicPrimEvaluator { ~EmbedEvaluator() override = default; MS_DECLARE_PARENT(EmbedEvaluator, SymbolicPrimEvaluator); AbstractBasePtr EvalPrim(const ConfigPtrList &args_conf_list) override { - // arg: free variable to be embeded + // arg: 
free variable to be embedded if (args_conf_list.size() != 1) { MS_LOG(EXCEPTION) << "EmbedEvaluator requires 1 parameter, but got " << args_conf_list.size(); } @@ -939,7 +939,7 @@ class PartialEvaluator : public Evaluator { AbstractBasePtr Run(AnalysisEnginePtr engine, const ConfigPtrList &args_conf_list, AnfNodeConfigPtr out_conf = nullptr) override { if (args_conf_list.size() == 0) { - MS_LOG(EXCEPTION) << "args size should be greater than 0"; + MS_LOG(EXCEPTION) << "Args size should be greater than 0"; } auto arg0_value = args_conf_list[0]->GetEvaluatedValue(); AbstractBasePtrList args_spec_list{arg0_value}; diff --git a/mindspore/ccsrc/pipeline/static_analysis/program_specialize.cc b/mindspore/ccsrc/pipeline/static_analysis/program_specialize.cc index bfa1e43ceb0..987c5d1db03 100644 --- a/mindspore/ccsrc/pipeline/static_analysis/program_specialize.cc +++ b/mindspore/ccsrc/pipeline/static_analysis/program_specialize.cc @@ -190,7 +190,7 @@ void FuncGraphSpecializer::FirstPass() { } if (node->func_graph() != func_graph_) { if (parent_ == nullptr) { - MS_LOG(EXCEPTION) << "parent must not null NodeInfo: " << trace::GetDebugInfo(node->debug_info()); + MS_LOG(EXCEPTION) << "Parent must not null NodeInfo: " << trace::GetDebugInfo(node->debug_info()); } parent_->AddTodoItem(node); parent_->FirstPass(); @@ -365,16 +365,16 @@ AnfNodePtr FuncGraphSpecializer::BuildSpecializedNodeInner(const AbstractBasePtr } if (!eval->isa()) { - MS_LOG(EXCEPTION) << "eval is not BaseGraphEvaluator, but " << eval->ToString(); + MS_LOG(EXCEPTION) << "Eval is not BaseGraphEvaluator, but " << eval->ToString(); } auto real_eval = dyn_cast(eval); if (func->context() != nullptr) { if (!IsVisible(func_graph_, func->context()->func_graph())) { - MS_LOG(EXCEPTION) << "func is not visible NodeInfo: " << trace::GetDebugInfo(func_graph_->debug_info()); + MS_LOG(EXCEPTION) << "Func is not visible NodeInfo: " << trace::GetDebugInfo(func_graph_->debug_info()); } } else { - MS_LOG(EXCEPTION) << "func 
context is nullptr NodeInfo: " << trace::GetDebugInfo(func_graph_->debug_info()); + MS_LOG(EXCEPTION) << "Func context is nullptr NodeInfo: " << trace::GetDebugInfo(func_graph_->debug_info()); } AnalysisContextPtr context = real_eval->MakeContext(engine_, argvals); MS_LOG(DEBUG) << "Specialize function graph: " << context->func_graph()->ToString() << ", args: " << argvals.size() @@ -556,7 +556,7 @@ SpecializeStatusCode FuncGraphSpecializer::FindUniqueArgvals(const AbstractFunct if (!result->first.empty()) { return kSpecializeSuccess; } - MS_LOG(DEBUG) << "Find POLY code, it may be unused code or unresoved polymorphism."; + MS_LOG(DEBUG) << "Find POLY code, it may be unused code or unresolved polymorphism."; return kSpecializeFindUniqueArgvalPoly; } } diff --git a/mindspore/ccsrc/pipeline/static_analysis/static_analysis.cc b/mindspore/ccsrc/pipeline/static_analysis/static_analysis.cc index 49182e8d09a..4ab7b9d20ba 100644 --- a/mindspore/ccsrc/pipeline/static_analysis/static_analysis.cc +++ b/mindspore/ccsrc/pipeline/static_analysis/static_analysis.cc @@ -89,7 +89,7 @@ std::size_t AnfNodeConfigHasher::operator()(const AnfNodeConfigPtr conf) const { MS_EXCEPTION_IF_NULL(conf->node()); std::size_t hash_value = hash_combine(conf->node()->hash(), conf->context()->hash()); if (conf->context() != nullptr && conf->context()->func_graph() != nullptr) { - MS_LOG(DEBUG) << "NodeConfgHasher Node: " << conf->node()->DebugString() + MS_LOG(DEBUG) << "NodeConfigHasher Node: " << conf->node()->DebugString() << ", Graph: " << conf->context()->func_graph()->ToString() << " ### , hash value: " << hash_value; } else { MS_LOG(DEBUG) << "NodeConfigHasher Node: " << conf->node()->DebugString() << " ### , hash value: " << hash_value; @@ -456,13 +456,13 @@ AbstractBasePtr AnalysisEngine::ExecuteMultipleEvaluators(const std::vectorcast(); if (fg_eval) { - auto undetermin_fgs = fg_eval->func_graph()->recursive_graphs(); - if (undetermin_fgs) { - for (auto undetermin_fg : *undetermin_fgs) { - 
MS_LOG(DEBUG) << "Set graph undetermin: " << undetermin_fg->ToString(); + auto undetermined_fgs = fg_eval->func_graph()->recursive_graphs(); + if (undetermined_fgs) { + for (auto undetermined_fg : *undetermined_fgs) { + MS_LOG(DEBUG) << "Set graph undetermined: " << undetermined_fg->ToString(); // As the current evaluator has multiple possibles, all the func_graphs which // are recursive with the current func_graph are undetermined in control flow. - undetermin_fg->set_flags(kFuncGraphFlagUndetermin, true); + undetermined_fg->set_flags(kFuncGraphFlagUndetermined, true); } } }