diff --git a/mindspore/ccsrc/backend/optimizer/pass/common_subexpression_elimination.cc b/mindspore/ccsrc/backend/optimizer/pass/common_subexpression_elimination.cc
index 40182b8797f..bb5323097a3 100644
--- a/mindspore/ccsrc/backend/optimizer/pass/common_subexpression_elimination.cc
+++ b/mindspore/ccsrc/backend/optimizer/pass/common_subexpression_elimination.cc
@@ -105,6 +105,7 @@ bool BackendCSE::CheckReplace(const AnfNodePtr &main, const AnfNodePtr &node, bo
 bool CommonSubexpressionElimination::Run(const FuncGraphPtr &func_graph) {
   MS_EXCEPTION_IF_NULL(func_graph);
   auto backend_cse = std::make_shared<BackendCSE>();
+  MS_EXCEPTION_IF_NULL(backend_cse);
   return backend_cse->Cse(func_graph, func_graph->manager());
 }
 }  // namespace opt
diff --git a/mindspore/ccsrc/backend/optimizer/pass/communication_op_fusion.cc b/mindspore/ccsrc/backend/optimizer/pass/communication_op_fusion.cc
index 7ff1f599c57..aed79184d0e 100644
--- a/mindspore/ccsrc/backend/optimizer/pass/communication_op_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/pass/communication_op_fusion.cc
@@ -361,6 +361,7 @@ bool CommunicationOpFusion::DoFusion(const FuncGraphPtr &func_graph, const Commu
       continue;
     }
     auto kernel_graph = func_graph->cast<KernelGraphPtr>();
+    MS_EXCEPTION_IF_NULL(kernel_graph);
     auto graph_id = kernel_graph->graph_id();
     AnfNodePtr new_communication_op =
       CreateFusedCommunicationOp(func_graph, communication_op_info, start_index, end_index);
diff --git a/mindspore/ccsrc/backend/optimizer/pass/convert_attr_to_unify_mindir.cc b/mindspore/ccsrc/backend/optimizer/pass/convert_attr_to_unify_mindir.cc
index 36e06a1660f..90d92038810 100644
--- a/mindspore/ccsrc/backend/optimizer/pass/convert_attr_to_unify_mindir.cc
+++ b/mindspore/ccsrc/backend/optimizer/pass/convert_attr_to_unify_mindir.cc
@@ -29,11 +29,14 @@ const AnfNodePtr ConvertAttrToUnifyMindIR::Process(const FuncGraphPtr &, const A
     return nullptr;
   }
 
-  CNodePtr cnode = node->cast<CNodePtr>();
+  auto cnode = node->cast<CNodePtr>();
+  MS_EXCEPTION_IF_NULL(cnode);
   auto inputs = cnode->inputs();
   AnfNodePtr op = inputs[0];
+  MS_EXCEPTION_IF_NULL(op);
   if (IsValueNode<Primitive>(op)) {
     auto prim = GetValueNode<PrimitivePtr>(op);
+    MS_EXCEPTION_IF_NULL(prim);
     auto attrs = prim->attrs();
     std::string type_name = prim->name();
     for (auto attr : attrs) {
diff --git a/mindspore/ccsrc/backend/optimizer/pass/convert_const_input_to_attr.cc b/mindspore/ccsrc/backend/optimizer/pass/convert_const_input_to_attr.cc
index 1a7b3541389..02316be0e11 100644
--- a/mindspore/ccsrc/backend/optimizer/pass/convert_const_input_to_attr.cc
+++ b/mindspore/ccsrc/backend/optimizer/pass/convert_const_input_to_attr.cc
@@ -34,7 +34,8 @@ const AnfNodePtr ConvertConstInputToAttr::Process(const FuncGraphPtr &, const An
     return nullptr;
   }
-  CNodePtr cnode = node->cast<CNodePtr>();
+  auto cnode = node->cast<CNodePtr>();
+  MS_EXCEPTION_IF_NULL(cnode);
   ConstInputToAttrInfoRegister reg;
   if (!ConstInputToAttrInfoRegistry::Instance().GetRegisterByOpName(AnfAlgo::GetCNodeName(cnode), &reg)) {
     return nullptr;
   }
@@ -47,6 +48,7 @@ const AnfNodePtr ConvertConstInputToAttr::Process(const FuncGraphPtr &, const An
   }
   if (AnfAlgo::GetCNodeName(cnode) == prim::kPrimGatherD->name()) {
     auto ms_context = MsContext::GetInstance();
+    MS_EXCEPTION_IF_NULL(ms_context);
     if (ms_context->get_param<std::string>(MS_CTX_DEVICE_TARGET) != kGPUDevice) {
       return nullptr;
     }
diff --git a/mindspore/ccsrc/backend/optimizer/pass/convert_const_scalar_to_tensor.cc b/mindspore/ccsrc/backend/optimizer/pass/convert_const_scalar_to_tensor.cc
index f9a93d25a1c..1177f5e3d91 100644
--- a/mindspore/ccsrc/backend/optimizer/pass/convert_const_scalar_to_tensor.cc
+++ b/mindspore/ccsrc/backend/optimizer/pass/convert_const_scalar_to_tensor.cc
@@ -68,6 +68,7 @@ const AnfNodePtr ConvertConstScalarToTensor::Process(const FuncGraphPtr &func_gr
     return nullptr;
   }
   auto cnode = node->cast<CNodePtr>();
+  MS_EXCEPTION_IF_NULL(cnode);
   bool input_changed = false;
   for (size_t i = 0; i < cnode->inputs().size(); ++i) {
     auto new_input = CreateTensorInput(func_graph->cast<KernelGraphPtr>(), cnode->inputs()[i]);
diff --git a/mindspore/ccsrc/backend/optimizer/pass/convert_tuple_output_to_maketuple.cc b/mindspore/ccsrc/backend/optimizer/pass/convert_tuple_output_to_maketuple.cc
index 275a138407e..ad6f48290b1 100644
--- a/mindspore/ccsrc/backend/optimizer/pass/convert_tuple_output_to_maketuple.cc
+++ b/mindspore/ccsrc/backend/optimizer/pass/convert_tuple_output_to_maketuple.cc
@@ -38,11 +38,12 @@ AnfNodePtr ConvertTupleInputToMakeTuple(const FuncGraphPtr &graph, const AnfNode
   if (anf_graph != nullptr) {
     kernel_graph = anf_graph->cast<KernelGraphPtr>();
   }
-
+  MS_EXCEPTION_IF_NULL(kernel_graph);
   if (kernel_graph->FindTupleParameterToMakeTupleMap(tuple_anf)) {
     return kernel_graph->FindTupleParameterToMakeTupleMap(tuple_anf);
   }
   auto make_tuple = kernel_graph->TransTupleToMakeTuple(tuple_anf);
+  MS_EXCEPTION_IF_NULL(make_tuple);
   kernel_graph->InsertTupleParameterToMakeTupleMap(tuple_anf, make_tuple);
   // replace graph inputs if input is a parameter
   kernel_graph->ReplaceGraphInput(tuple_anf, make_tuple);
diff --git a/mindspore/ccsrc/backend/optimizer/pass/getitem_tuple.cc b/mindspore/ccsrc/backend/optimizer/pass/getitem_tuple.cc
index 540310ab9a6..54dc5cfc4c7 100644
--- a/mindspore/ccsrc/backend/optimizer/pass/getitem_tuple.cc
+++ b/mindspore/ccsrc/backend/optimizer/pass/getitem_tuple.cc
@@ -26,7 +26,7 @@ namespace {
 bool IsC(const BaseRef &n) {
   MS_EXCEPTION_IF_NULL(n);
   if (utils::isa<AnfNodePtr>(n)) {
-    AnfNodePtr in = utils::cast<AnfNodePtr>(n);
+    auto in = utils::cast<AnfNodePtr>(n);
     MS_EXCEPTION_IF_NULL(in);
     return in->isa<ValueNode>();
   } else {
@@ -43,7 +43,7 @@ const BaseRef GetitemTuple::DefinePattern() const {
 
 const AnfNodePtr GetitemTuple::Process(const FuncGraphPtr &, const AnfNodePtr &node, const EquivPtr &) const {
   MS_EXCEPTION_IF_NULL(node);
-  CNodePtr tuple_getitem = node->cast<CNodePtr>();
+  auto tuple_getitem = node->cast<CNodePtr>();
   MS_EXCEPTION_IF_NULL(tuple_getitem);
   CheckCNodeInputSize(tuple_getitem, kTupleGetItemInputTensorNum);
   AnfNodePtr make_tuple_anf = tuple_getitem->input(kRealInputNodeIndexInTupleGetItem);
@@ -51,10 +51,10 @@ const AnfNodePtr GetitemTuple::Process(const FuncGraphPtr &, const AnfNodePtr &n
   AnfNodePtr index_node = tuple_getitem->input(kInputNodeOutputIndexInTupleGetItem);
   MS_EXCEPTION_IF_NULL(index_node);
   if (IsValueNode<Int64Imm>(index_node)) {
-    ValueNodePtr value_node = index_node->cast<ValueNodePtr>();
+    auto value_node = index_node->cast<ValueNodePtr>();
     MS_EXCEPTION_IF_NULL(value_node);
     auto index = GetValue<int64_t>(value_node->value());
-    CNodePtr make_tuple = make_tuple_anf->cast<CNodePtr>();
+    auto make_tuple = make_tuple_anf->cast<CNodePtr>();
     MS_EXCEPTION_IF_NULL(make_tuple);
     if (make_tuple->inputs().size() > LongToSize(index + 1)) {
       auto ret = make_tuple->input(LongToSize(index + 1));
diff --git a/mindspore/ccsrc/backend/optimizer/pass/optimize_dependence.cc b/mindspore/ccsrc/backend/optimizer/pass/optimize_dependence.cc
index b6ad9e7e812..e8f311c04d3 100644
--- a/mindspore/ccsrc/backend/optimizer/pass/optimize_dependence.cc
+++ b/mindspore/ccsrc/backend/optimizer/pass/optimize_dependence.cc
@@ -60,6 +60,7 @@ CNodePtr CheckIsolatedVirtualNode(const CNodePtr &cnode) {
     return nullptr;
   }
   auto real_input_op = AnfAlgo::GetInputNode(cnode, kIsolatedDependRealInputIndex);
+  MS_EXCEPTION_IF_NULL(real_input_op);
   if (!real_input_op->isa<CNode>()) {
     return nullptr;
   }
@@ -210,6 +211,7 @@ const AnfNodePtr OptimizeDependence::GetConvertNode(const FuncGraphPtr &graph, c
   MS_EXCEPTION_IF_NULL(graph);
   MS_EXCEPTION_IF_NULL(node);
   auto depend_cnode = node->cast<CNodePtr>();
+  MS_EXCEPTION_IF_NULL(depend_cnode);
   auto replacing_node = depend_cnode->input(index);
   MS_EXCEPTION_IF_NULL(replacing_node);
   if (!replacing_node->isa<CNode>()) {
diff --git a/mindspore/ccsrc/backend/optimizer/pass/optimize_updatestate.cc b/mindspore/ccsrc/backend/optimizer/pass/optimize_updatestate.cc
index 30bff16acbc..20cae4c1fa2 100644
--- a/mindspore/ccsrc/backend/optimizer/pass/optimize_updatestate.cc
+++ b/mindspore/ccsrc/backend/optimizer/pass/optimize_updatestate.cc
@@ -35,6 +35,8 @@ const BaseRef OptimizeUpdateState::DefinePattern() const {
 
 const AnfNodePtr OptimizeUpdateState::Process(const FuncGraphPtr &func_graph, const AnfNodePtr &node,
                                               const EquivPtr &) const {
+  MS_EXCEPTION_IF_NULL(func_graph);
+  MS_EXCEPTION_IF_NULL(node);
   auto update_state = dyn_cast<CNode>(node);
   MS_EXCEPTION_IF_NULL(update_state);
   if (update_state->size() <= kAdditionalAttachIndex) {
diff --git a/mindspore/ccsrc/backend/optimizer/pass/replace_node_by_proxy.cc b/mindspore/ccsrc/backend/optimizer/pass/replace_node_by_proxy.cc
index d4fe31013f1..142f5588cb1 100644
--- a/mindspore/ccsrc/backend/optimizer/pass/replace_node_by_proxy.cc
+++ b/mindspore/ccsrc/backend/optimizer/pass/replace_node_by_proxy.cc
@@ -60,7 +60,8 @@ bool ReplaceNodeByProxy::Run(const FuncGraphPtr &func_graph) {
   for (auto node : node_list) {
     if (node != nullptr && node->isa<CNode>() && AnfAlgo::GetCNodeName(node) == kEmbeddingLookupOpName) {
       TraceGuard guard(std::make_shared<TraceOpt>(node->debug_info()));
-      CNodePtr cnode = node->cast<CNodePtr>();
+      auto cnode = node->cast<CNodePtr>();
+      MS_EXCEPTION_IF_NULL(cnode);
       auto prim = std::make_shared<Primitive>(kEmbeddingLookupProxyOpName);
       MS_EXCEPTION_IF_NULL(prim);
       std::vector<AnfNodePtr> proxy_inputs = {NewValueNode(prim)};
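All of the hunks above apply the same defensive idiom: after a `cast`/`dyn_cast` or any other call that may yield a null pointer, the result is guarded with `MS_EXCEPTION_IF_NULL` before it is dereferenced. Below is a minimal standalone sketch of that idiom; the macro, node types, and `HandleNode` helper are simplified illustrations written for this example, not MindSpore's actual definitions.

```cpp
// Simplified stand-ins for illustration only; NOT MindSpore's real macro or node classes.
#include <iostream>
#include <memory>
#include <stdexcept>

#define MS_EXCEPTION_IF_NULL(ptr)                                  \
  do {                                                             \
    if ((ptr) == nullptr) {                                        \
      throw std::runtime_error("The pointer [" #ptr "] is null."); \
    }                                                              \
  } while (0)

// Minimal node hierarchy: a cast to the derived type can fail and yield nullptr.
struct AnfNode {
  virtual ~AnfNode() = default;
};
struct CNode : AnfNode {
  void Process() const { std::cout << "processing CNode" << std::endl; }
};
using AnfNodePtr = std::shared_ptr<AnfNode>;
using CNodePtr = std::shared_ptr<CNode>;

void HandleNode(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  // The cast returns nullptr when node is not actually a CNode, so the result
  // must be checked before it is dereferenced -- the same pattern this patch
  // adds after each cast or getter call in the optimizer passes.
  auto cnode = std::dynamic_pointer_cast<CNode>(node);
  MS_EXCEPTION_IF_NULL(cnode);
  cnode->Process();
}

int main() {
  HandleNode(std::make_shared<CNode>());  // cast succeeds
  try {
    HandleNode(std::make_shared<AnfNode>());  // cast yields nullptr and throws
  } catch (const std::exception &e) {
    std::cout << e.what() << std::endl;
  }
  return 0;
}
```

Checking the pointer immediately after it is produced makes a bad cast fail fast with a readable message naming the offending variable, rather than crashing later inside the pass with a plain segmentation fault.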