From 195779d7253648f72c0164f9e594b64e7ecd0091 Mon Sep 17 00:00:00 2001
From: tangxl
Date: Thu, 2 Nov 2023 19:05:32 +0800
Subject: [PATCH] adapt ge print

Map Print directly to the AICPU OutfeedEnqueueOpV2 operator (channel
"_npu_log") in AdjustPrintForGe, and drop the StringFormat + PrintV2
lowering it replaces: the PrintToStringFormatPrint pass, the
ConvertPrint hook, and the PrintV2 adapter registration.

---
 .../ascend/hal/device/tensorprint_utils.cc    |   1 +
 .../optimizer/ge/adjust_print_for_ge.cc       |  34 ++-
 .../ge/print_to_stringformat_print.cc         | 231 ------------------
 .../ge/print_to_stringformat_print.h          |  35 ---
 .../optimizer/ge_backend_optimization.cc      |   2 -
 mindspore/ccsrc/transform/graph_ir/convert.cc |  14 --
 mindspore/ccsrc/transform/graph_ir/convert.h  |   1 -
 .../graph_ir/op_declare/array_ops_declare.cc  |   3 +-
 .../op_declare/logging_ops_declare.cc         |   5 -
 .../st/fallback/test_graph_fallback_print.py  |   2 +-
 10 files changed, 34 insertions(+), 294 deletions(-)
 delete mode 100644 mindspore/ccsrc/plugin/device/ascend/optimizer/ge/print_to_stringformat_print.cc
 delete mode 100644 mindspore/ccsrc/plugin/device/ascend/optimizer/ge/print_to_stringformat_print.h

diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/device/tensorprint_utils.cc b/mindspore/ccsrc/plugin/device/ascend/hal/device/tensorprint_utils.cc
index c8d8ce63ac8..5946ec42549 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/device/tensorprint_utils.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/device/tensorprint_utils.cc
@@ -16,6 +16,7 @@
 #include "plugin/device/ascend/hal/device/tensorprint_utils.h"
 #include
 #include
+#include
 #include
 #include
 #include
diff --git a/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/adjust_print_for_ge.cc b/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/adjust_print_for_ge.cc
index 25b1ebd5e36..dc07d99a3e1 100644
--- a/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/adjust_print_for_ge.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/adjust_print_for_ge.cc
@@ -21,17 +21,31 @@
 #include
 #include "ops/framework_ops.h"
 #include "ops/sequence_ops.h"
+#include "include/common/utils/anfalgo.h"
 
 namespace mindspore {
 namespace opt {
 namespace {
 constexpr size_t kIndexOne = 1;
 constexpr size_t kInputSizeTwo = 2;
+
+bool PrintUnvisited(const BaseRef &ref) {
+  if (utils::isa<AnfNodePtr>(ref)) {
+    auto node = utils::cast<AnfNodePtr>(ref);
+    MS_EXCEPTION_IF_NULL(node);
+    if (!IsPrimitive(node, prim::kPrimPrint)) {
+      return false;
+    }
+    return UnVisited(ref);
+  }
+  return false;
+}
 }  // namespace
 
 const BaseRef AdjustPrintForGe::DefinePattern() const {
+  VarPtr V = std::make_shared<CondVar>(PrintUnvisited);
   VarPtr Xs = std::make_shared<SeqVar>();
-  return VectorRef({prim::kPrimPrint, Xs});
+  return VectorRef({V, Xs});
 }
 
 // replace print(i1, i2, U) with 1. print() 2. depend(0.0, print)
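Illustration (not part of the patch): the comment above is terse, so here is the rewrite on a toy graph with two value inputs and the IO monad U. After this pass runs, the graph looks roughly like this (the %-names are hypothetical, not real IR identifiers):

    %tuple = MakeTuple(%i1, %i2)           // value inputs only; the trailing monad is excluded
    %print = Print("print", %tuple, %U)    // cloned Primitive carries channel_name="_npu_log"
    %out   = Depend(Tensor(0.0), %print)   // dummy 0.0 output keeps the isolated Print alive

The second hunk below ends at the creation of the 0.0 tensor; the rest of the function, unchanged by this patch, builds the Depend from it, as step 2 of the comment describes.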
@@ -39,6 +53,7 @@ const AnfNodePtr AdjustPrintForGe::Process(const FuncGraphPtr &func_graph, const
                                            const EquivPtr &) const {
   MS_EXCEPTION_IF_NULL(func_graph);
   MS_EXCEPTION_IF_NULL(node);
+  common::AnfAlgo::SetNodeAttr(kAttrVisited, MakeValue(true), node);
   auto cnode = node->cast<CNodePtr>();
   MS_EXCEPTION_IF_NULL(cnode);
   const std::vector<AnfNodePtr> &inputs = cnode->inputs();
@@ -46,22 +61,35 @@ const AnfNodePtr AdjustPrintForGe::Process(const FuncGraphPtr &func_graph, const
     return nullptr;
   }
   std::vector<AnfNodePtr> make_tuple_inputs{NewValueNode(std::make_shared<Primitive>(kMakeTupleOpName))};
-  make_tuple_inputs.insert(make_tuple_inputs.end(), inputs.begin() + kIndexOne, inputs.end());
+  make_tuple_inputs.insert(make_tuple_inputs.end(), inputs.begin() + kIndexOne, inputs.end() - 1);
   auto make_tuple_node = func_graph->NewCNode(make_tuple_inputs);
   MS_EXCEPTION_IF_NULL(make_tuple_node);
   std::vector<AbstractBasePtr> abstract_list;
-  for (size_t input_index = kIndexOne; input_index < inputs.size(); ++input_index) {
+  for (size_t input_index = kIndexOne; input_index < inputs.size() - 1; ++input_index) {
     auto input_node = inputs.at(input_index);
     MS_EXCEPTION_IF_NULL(input_node);
     (void)abstract_list.emplace_back(input_node->abstract());
   }
   make_tuple_node->set_abstract(std::make_shared<abstract::AbstractTuple>(abstract_list));
   std::vector<AnfNodePtr> new_print_inputs{NewValueNode(std::make_shared<Primitive>(kPrintOpName))};
+  auto tensor_name = "print";
+  AnfNodePtr input_tensor_name = NewValueNode(std::make_shared<StringImm>(tensor_name));
+  input_tensor_name->set_abstract(std::make_shared<abstract::AbstractScalar>(kString));
+  (void)new_print_inputs.emplace_back(input_tensor_name);
   (void)new_print_inputs.emplace_back(make_tuple_node);
+  (void)new_print_inputs.emplace_back(inputs.at(inputs.size() - 1));
   auto new_print_node = func_graph->NewCNode(new_print_inputs);
   MS_EXCEPTION_IF_NULL(new_print_node);
   new_print_node->set_abstract(node->abstract());
+  auto primitive = GetCNodePrimitive(new_print_node);
+  MS_EXCEPTION_IF_NULL(primitive);
+  primitive = primitive->Clone();
+  MS_EXCEPTION_IF_NULL(primitive);
+  (void)primitive->AddAttr("channel_name", MakeValue("_npu_log"));
+  new_print_node->set_input(0, std::make_shared<ValueNode>(primitive));
+  common::AnfAlgo::SetNodeAttr(kAttrVisited, MakeValue(true), new_print_node);
+
   auto tensor = std::make_shared<tensor::Tensor>(0.0);
   auto kernel_graph = func_graph->cast<KernelGraphPtr>();
   MS_EXCEPTION_IF_NULL(kernel_graph);
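For orientation (not part of the patch): the input layout assembled above lines up with the OutfeedEnqueueOpV2 adapter that this patch registers for Print in array_ops_declare.cc further down. A rough correspondence, based only on the INPUT_MAP/DYN_INPUT_MAP/ATTR_MAP lines in that file — the monad handling is an assumption, not shown in this patch:

    // Print CNode after AdjustPrintForGe      ->  OutfeedEnqueueOpV2 (GE op)
    // input 1: ValueNode<StringImm>("print")  ->  INPUT_MAP     {1, tensor_name}
    // input 2: MakeTuple of value inputs      ->  DYN_INPUT_MAP {2, x}
    // last input: IO monad                    ->  assumed dropped by the graph converter
    // attr "channel_name" = "_npu_log"        ->  ATTR_MAP      channel_name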
diff --git a/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/print_to_stringformat_print.cc b/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/print_to_stringformat_print.cc
deleted file mode 100644
index ed9bd232128..00000000000
--- a/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/print_to_stringformat_print.cc
+++ /dev/null
@@ -1,231 +0,0 @@
-/**
- * Copyright 2023 Huawei Technologies Co., Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "plugin/device/ascend/optimizer/ge/print_to_stringformat_print.h"
-
-#include
-#include
-#include
-#include
-#include "ops/framework_ops.h"
-#include "ops/sequence_ops.h"
-#include "include/backend/anf_runtime_algorithm.h"
-#include "include/common/utils/anfalgo.h"
-#include "ir/dtype.h"
-#include "utils/check_convert_utils.h"
-
-namespace mindspore {
-namespace opt {
-namespace {
-const char kSeparator[] = ", ";
-const char kShapePrefix[] = "[";
-const char kShapeSuffix[] = "]";
-const char kEmptyShape[] = "[]";
-
-std::string GetTensorShape(const AnfNodePtr &node) {
-  MS_EXCEPTION_IF_NULL(node);
-  std::string shape_str = "shape=";
-  auto abstract_ptr = node->abstract();
-  MS_EXCEPTION_IF_NULL(abstract_ptr);
-  auto shape_ptr = abstract_ptr->GetShapeTrack();
-  if (shape_ptr == nullptr) {
-    MS_LOG(EXCEPTION) << "The shape of node " << node->fullname_with_scope() << " is nullptr";
-  }
-  auto shape_vec = shape_ptr->cast<abstract::ShapePtr>()->shape();
-  if (shape_vec.empty()) {
-    shape_str += kEmptyShape;
-  } else {
-    shape_str += kShapePrefix;
-    for (auto &shape : shape_vec) {
-      shape_str += std::to_string(shape);
-      shape_str += " ";
-    }
-    shape_str.erase(shape_str.find_last_not_of(" ") + 1);
-    shape_str += kShapeSuffix;
-  }
-  return shape_str;
-}
-
-std::string GetTensorDtype(const AnfNodePtr &node) {
-  MS_EXCEPTION_IF_NULL(node);
-  std::string type_str = "dtype=";
-  auto type = node->Type();
-  MS_EXCEPTION_IF_NULL(type);
-  MS_EXCEPTION_IF_NULL(dyn_cast<TensorType>(type));
-  auto type_ptr = dyn_cast<TensorType>(type)->element();
-  MS_EXCEPTION_IF_NULL(type_ptr);
-  auto type_id = type_ptr->type_id();
-  type_str += TypeIdToString(type_id);
-  return type_str;
-}
-
-CNodePtr CreateNewMakeTuple(const FuncGraphPtr &graph, const std::vector<AnfNodePtr> &make_tuple_inputs) {
-  MS_EXCEPTION_IF_NULL(graph);
-  std::vector<AnfNodePtr> new_make_tuple_inputs{NewValueNode(std::make_shared<Primitive>(kMakeTupleOpName))};
-  std::vector<AbstractBasePtr> abstract_list;
-  for (auto &make_tuple_input_node : make_tuple_inputs) {
-    MS_EXCEPTION_IF_NULL(make_tuple_input_node);
-    new_make_tuple_inputs.emplace_back(make_tuple_input_node);
-    abstract_list.emplace_back(make_tuple_input_node->abstract());
-  }
-
-  auto new_make_tuple_node = graph->NewCNode(new_make_tuple_inputs);
-  MS_EXCEPTION_IF_NULL(new_make_tuple_node);
-  new_make_tuple_node->set_abstract(std::make_shared<abstract::AbstractTuple>(abstract_list));
-  return new_make_tuple_node;
-}
-
-CNodePtr CreateNewPrint(const FuncGraphPtr &graph, const CNodePtr &string_format_node, const CNodePtr &print_node) {
-  MS_EXCEPTION_IF_NULL(graph);
-  MS_EXCEPTION_IF_NULL(string_format_node);
-  MS_EXCEPTION_IF_NULL(print_node);
-
-  std::vector<AnfNodePtr> new_print_inputs{NewValueNode(std::make_shared<Primitive>(kPrintOpName))};
-  (void)new_print_inputs.emplace_back(string_format_node);
-  // Add IOMonad.
-  const CNodePtr &make_tuple_node = print_node->input(1)->cast<CNodePtr>();
-  MS_EXCEPTION_IF_NULL(make_tuple_node);
-  const std::vector<AnfNodePtr> &inputs = make_tuple_node->inputs();
-  new_print_inputs.emplace_back(inputs.at(inputs.size() - 1));
-
-  auto new_print_node = graph->NewCNode(new_print_inputs);
-  MS_EXCEPTION_IF_NULL(new_print_node);
-  new_print_node->set_abstract(string_format_node->abstract());
-
-  auto primitive = GetCNodePrimitive(new_print_node);
-  MS_EXCEPTION_IF_NULL(primitive);
-  primitive = primitive->Clone();
-  MS_EXCEPTION_IF_NULL(primitive);
-  (void)primitive->AddAttr("output_stream", MakeValue("stdout"));
-  new_print_node->set_input(0, std::make_shared<ValueNode>(primitive));
-  return new_print_node;
-}
-
-CNodePtr CreateShape(const FuncGraphPtr &graph, const AnfNodePtr &node) {
-  MS_EXCEPTION_IF_NULL(graph);
-  MS_EXCEPTION_IF_NULL(node);
-
-  std::vector<AnfNodePtr> shape_inputs{NewValueNode(std::make_shared<Primitive>("TensorShape"))};
-  (void)shape_inputs.emplace_back(node);
-  auto shape_node = graph->NewCNode(shape_inputs);
-  MS_EXCEPTION_IF_NULL(shape_node);
-  abstract::AbstractBasePtr abs;
-  auto node_abstract = node->abstract();
-  MS_EXCEPTION_IF_NULL(node_abstract);
-  auto shape_map = CheckAndConvertUtils::ConvertShapePtrToShapeMap(node_abstract->GetShapeTrack());
-  auto shape = shape_map[kShape];
-  ShapeVector tensor_shp({static_cast<int64_t>(shape.size())});
-  if (IsDynamic(shape)) {
-    if (IsDynamicRank(shape)) {
-      abs = abstract::MakeAbstract(
-        std::make_shared<abstract::Shape>(std::vector<int64_t>{abstract::Shape::kShapeDimAny}), kInt64);
-    } else {
-      auto elem = std::make_shared<abstract::AbstractScalar>(std::make_shared<ValueAny>(), std::make_shared<Int>(64));
-      auto abs_tensor = std::make_shared<abstract::AbstractTensor>(elem, std::make_shared<abstract::Shape>(tensor_shp));
-      abs = abs_tensor;
-    }
-  } else {
-    auto shp_buf_size = sizeof(int64_t) * shape.size();
-    auto tensor = std::make_shared<tensor::Tensor>(kNumberTypeInt64, tensor_shp, shape.data(), shp_buf_size);
-    abs = tensor->ToAbstract();
-  }
-  shape_node->set_abstract(abs);
-  return shape_node;
-}
-
-CNodePtr CreateStringFormat(const FuncGraphPtr &graph, const CNodePtr &print_node) {
-  MS_EXCEPTION_IF_NULL(graph);
-  MS_EXCEPTION_IF_NULL(print_node);
-  const CNodePtr &make_tuple_node = print_node->input(1)->cast<CNodePtr>();
-  const std::vector<AnfNodePtr> &inputs = make_tuple_node->inputs();
-  constexpr auto placeholder = "{}";
-  std::string str_template = "";
-  constexpr auto summarize = -1;
-  CNodePtr new_make_tuple_node;
-  std::vector<AnfNodePtr> make_tuple_inputs;
-  // Set node template attribute which StringFormat needs.
-  for (size_t input_index = 1; input_index < inputs.size() - 1; ++input_index) {
-    auto input_node = inputs.at(input_index);
-    MS_EXCEPTION_IF_NULL(input_node);
-    if (IsValueNode<StringImm>(input_node)) {
-      auto value_ptr = GetValueNode(input_node);
-      str_template = str_template + GetValue<std::string>(value_ptr) + "\n";
-    } else {
-      if (common::AnfAlgo::CheckPrimitiveType(input_node, prim::kPrimMakeTuple)) {
-        new_make_tuple_node = input_node->cast<CNodePtr>();
-        break;
-      }
-      std::string str_dtype;
-      auto abstract = input_node->abstract();
-      MS_EXCEPTION_IF_NULL(abstract);
-      auto shape_ptr = abstract->GetShapeTrack()->cast<abstract::ShapePtr>();
-      MS_EXCEPTION_IF_NULL(shape_ptr);
-      auto shape = shape_ptr->shape();
-      // For dynamic shape input tensor, insert TensorShape ops to get real shape.
-      if (IsDynamic(shape)) {
-        auto shape_node = CreateShape(graph, input_node);
-        make_tuple_inputs.emplace_back(shape_node);
-        str_template = str_template + "Tensor(shape=" + placeholder;
-      } else {
-        auto str_shape = GetTensorShape(input_node);
-        str_template = str_template + "Tensor(" + str_shape;
-      }
-      str_dtype = GetTensorDtype(input_node);
-      str_template = str_template + kSeparator + str_dtype + kSeparator + "value=\n" + placeholder + ")\n";
-      make_tuple_inputs.emplace_back(input_node);
-    }
-  }
-  if (!str_template.empty()) {
-    str_template.pop_back();
-  }
-  if (new_make_tuple_node == nullptr) {
-    new_make_tuple_node = CreateNewMakeTuple(graph, make_tuple_inputs);
-  }
-  std::vector<AnfNodePtr> string_format_inputs{NewValueNode(std::make_shared<Primitive>("StringFormat"))};
-  string_format_inputs.emplace_back(new_make_tuple_node);
-  auto string_format_node = graph->NewCNode(string_format_inputs);
-  MS_EXCEPTION_IF_NULL(string_format_node);
-  string_format_node->set_abstract(std::make_shared<abstract::AbstractScalar>(kString));
-  auto primitive = GetCNodePrimitive(string_format_node);
-  MS_EXCEPTION_IF_NULL(primitive);
-  primitive = primitive->Clone();
-  MS_EXCEPTION_IF_NULL(primitive);
-  (void)primitive->AddAttr("template", MakeValue(str_template));
-  (void)primitive->AddAttr("placeholder", MakeValue(placeholder));
-  (void)primitive->AddAttr("summarize", MakeValue(summarize));
-  string_format_node->set_input(0, std::make_shared<ValueNode>(primitive));
-  return string_format_node;
-}
-}  // namespace
-
-const BaseRef PrintToStringFormatPrint::DefinePattern() const {
-  VarPtr Xs = std::make_shared<SeqVar>();
-  return VectorRef({prim::kPrimPrint, Xs});
-}
-
-const AnfNodePtr PrintToStringFormatPrint::Process(const FuncGraphPtr &func_graph, const AnfNodePtr &node,
-                                                   const EquivPtr &) const {
-  MS_EXCEPTION_IF_NULL(func_graph);
-  MS_EXCEPTION_IF_NULL(node);
-  auto cnode = node->cast<CNodePtr>();
-  MS_EXCEPTION_IF_NULL(cnode);
-  // convert Print to StringFormat and PrintV2 to adapt CANN
-  auto string_format_node = CreateStringFormat(func_graph, cnode);
-  auto new_print_node = CreateNewPrint(func_graph, string_format_node, cnode);
-  return new_print_node;
-}
-}  // namespace opt
-}  // namespace mindspore
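The deleted CreateStringFormat built the whole message template on the host at compile time: string-literal inputs were folded into the template, and each tensor contributed a "Tensor(shape=..., dtype=..., value=\n{})" fragment whose "{}" placeholder StringFormat filled at run time (dynamic-shape tensors got a TensorShape node plus a second placeholder). One detail worth noting is the space-joined shape text. A self-contained sketch of just that joining logic — FormatShapeLikeStringFormat is an illustrative name, not a MindSpore API:

    #include <cstdint>
    #include <iostream>
    #include <string>
    #include <vector>

    // Mirrors the deleted GetTensorShape(): dimensions joined by single
    // spaces, then the trailing separator is trimmed before the bracket.
    std::string FormatShapeLikeStringFormat(const std::vector<int64_t> &shape) {
      if (shape.empty()) {
        return "shape=[]";
      }
      std::string str = "shape=[";
      for (auto dim : shape) {
        str += std::to_string(dim) + " ";
      }
      str.erase(str.find_last_not_of(' ') + 1);  // drop the trailing space
      return str + "]";
    }

    int main() {
      // Prints "shape=[2 2]" -- the old, space-separated form.
      std::cout << FormatShapeLikeStringFormat({2, 2}) << std::endl;
      return 0;
    }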
diff --git a/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/print_to_stringformat_print.h b/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/print_to_stringformat_print.h
deleted file mode 100644
index 0bfc86627eb..00000000000
--- a/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/print_to_stringformat_print.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Copyright 2023 Huawei Technologies Co., Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MINDSPORE_CCSRC_PLUGIN_DEVICE_ASCEND_OPTIMIZER_GE_PRINT_TO_STRINGFORMAT_PRINT_H_
-#define MINDSPORE_CCSRC_PLUGIN_DEVICE_ASCEND_OPTIMIZER_GE_PRINT_TO_STRINGFORMAT_PRINT_H_
-
-#include "include/backend/optimizer/optimizer.h"
-
-namespace mindspore {
-namespace opt {
-class PrintToStringFormatPrint : public PatternProcessPass {
- public:
-  explicit PrintToStringFormatPrint(bool multigraph = true)
-      : PatternProcessPass("print_to_stringformat_print", multigraph) {}
-  ~PrintToStringFormatPrint() override = default;
-
-  const BaseRef DefinePattern() const override;
-  const AnfNodePtr Process(const FuncGraphPtr &, const AnfNodePtr &node, const EquivPtr &) const override;
-};
-}  // namespace opt
-}  // namespace mindspore
-#endif  // MINDSPORE_CCSRC_PLUGIN_DEVICE_ASCEND_OPTIMIZER_GE_PRINT_TO_STRINGFORMAT_PRINT_H_
diff --git a/mindspore/ccsrc/plugin/device/ascend/optimizer/ge_backend_optimization.cc b/mindspore/ccsrc/plugin/device/ascend/optimizer/ge_backend_optimization.cc
index f3c56be9b56..b09038ae710 100644
--- a/mindspore/ccsrc/plugin/device/ascend/optimizer/ge_backend_optimization.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/optimizer/ge_backend_optimization.cc
@@ -40,7 +40,6 @@
 #include "plugin/device/ascend/optimizer/ge/trans_depend_value_to_int32.h"
 #include "plugin/device/ascend/optimizer/ge/insert_identity.h"
 #include "plugin/device/ascend/optimizer/ge/dropout_gen_mask_depend.h"
-#include "plugin/device/ascend/optimizer/ge/print_to_stringformat_print.h"
 #include "plugin/device/ascend/optimizer/ge/unfold_maketuple.h"
 #include "plugin/device/ascend/optimizer/ge/unfold_nested_output.h"
 #include "plugin/device/ascend/optimizer/ge/resize_bilinear_add_attr.h"
@@ -84,7 +83,6 @@ void GEBackendOptimization(const KernelGraphPtr &kernel_graph) {
   opt_ge_pm->AddPass(std::make_shared());
   opt_ge_pm->AddPass(std::make_shared());
   opt_ge_pm->AddPass(std::make_shared());
-  opt_ge_pm->AddPass(std::make_shared<PrintToStringFormatPrint>());
   opt_ge_pm->AddPass(std::make_shared());
   opt_ge_pm->AddPass(std::make_shared());
   opt_ge_pm->AddPass(std::make_shared());
diff --git a/mindspore/ccsrc/transform/graph_ir/convert.cc b/mindspore/ccsrc/transform/graph_ir/convert.cc
index e8a7e65218c..e02bee0237e 100644
--- a/mindspore/ccsrc/transform/graph_ir/convert.cc
+++ b/mindspore/ccsrc/transform/graph_ir/convert.cc
@@ -3417,18 +3417,6 @@ void DfGraphConvertor::ConvertParallelGroupToHcom(const CNodePtr &node) {
   op_cache_[node.get()] = op;
 }
 
-void DfGraphConvertor::ConvertPrint(const CNodePtr &node) {
-  MS_EXCEPTION_IF_NULL(node);
-  OpAdapterPtr adpt = FindAdapter(node, training_);
-  if (adpt == nullptr) {
-    return;
-  }
-  auto op = adpt->generate(node);
-  MS_EXCEPTION_IF_NULL(op);
-  (void)op->SetAttr("_kernel", "extend");
-  op_cache_[node.get()] = op;
-}
-
 void DfGraphConvertor::ConvertLoad(const CNodePtr &node) {
   auto nodes = node->inputs();
   bool need_constant = false;
@@ -3664,8 +3652,6 @@ bool DfGraphConvertor::CheckCNode(const std::string &name, const CNodePtr node)
     {kNameConv2DBackpropInputV2, &DfGraphConvertor::ConvertConv2D},
     {prim::kPrimConv2DBackpropInput->name(), &DfGraphConvertor::ConvertConv2D},
     {prim::kPrimConv2DBackpropFilter->name(), &DfGraphConvertor::ConvertConv2D},
-    // Add attr '_kernel' to select AICPU Print ops.
-    {prim::kPrimPrint->name(), &DfGraphConvertor::ConvertPrint},
     // Add attr 'N' to DynamicStitch
     {prim::kPrimDynamicStitch->name(), &DfGraphConvertor::ConvertDynamicStitch},
     // Convert hccl op for comm handle
diff --git a/mindspore/ccsrc/transform/graph_ir/convert.h b/mindspore/ccsrc/transform/graph_ir/convert.h
index cbbf0055e5a..a98a9f1807a 100644
--- a/mindspore/ccsrc/transform/graph_ir/convert.h
+++ b/mindspore/ccsrc/transform/graph_ir/convert.h
@@ -234,7 +234,6 @@ class DfGraphConvertor {
   void ConvertSpaceBatchNd(const FuncGraphPtr anf_graph) const;
   AnfNodePtr CreateCast(const AnfNodePtr &input, const TypePtr &dst_type) const;
   void ConvertReshape(const CNodePtr &node);
-  void ConvertPrint(const CNodePtr &node);
   void ConvertLoad(const CNodePtr &node);
   void ConvertHcomFusionId(const CNodePtr &node);
   void ConvertHcclNode(const CNodePtr &node);
diff --git a/mindspore/ccsrc/transform/graph_ir/op_declare/array_ops_declare.cc b/mindspore/ccsrc/transform/graph_ir/op_declare/array_ops_declare.cc
index 158eead8b72..62afa39d286 100644
--- a/mindspore/ccsrc/transform/graph_ir/op_declare/array_ops_declare.cc
+++ b/mindspore/ccsrc/transform/graph_ir/op_declare/array_ops_declare.cc
@@ -39,7 +39,6 @@ ATTR_MAP(Summary) = EMPTY_ATTR_MAP;
 REG_ADPT_DESC(Debug, prim::kPrimDebug->name(), ADPT_DESC(Summary))
 
 // OutfeedEnqueueOpV2
-#ifndef ENABLE_SECURITY
 DYN_INPUT_MAP(OutfeedEnqueueOpV2) = {{2, DYN_INPUT_DESC(x)}};
 INPUT_MAP(OutfeedEnqueueOpV2) = {{1, INPUT_DESC(tensor_name)}};
 ATTR_MAP(OutfeedEnqueueOpV2) = {{"channel_name", ATTR_DESC(channel_name, AnyTraits<std::string>())}};
@@ -49,7 +48,7 @@ REG_ADPT_DESC(ScalarSummary, "ScalarSummary", ADPT_DESC(OutfeedEnqueueOpV2))
 REG_ADPT_DESC(ImageSummary, "ImageSummary", ADPT_DESC(OutfeedEnqueueOpV2))
 REG_ADPT_DESC(HistogramSummary, "HistogramSummary", ADPT_DESC(OutfeedEnqueueOpV2))
 REG_ADPT_DESC(TensorDump, kNameTensorDump, ADPT_DESC(OutfeedEnqueueOpV2))
-#endif
+REG_ADPT_DESC(Print, kNamePrint, ADPT_DESC(OutfeedEnqueueOpV2))
 
 // Data
 INPUT_MAP(Data) = EMPTY_INPUT_MAP;
diff --git a/mindspore/ccsrc/transform/graph_ir/op_declare/logging_ops_declare.cc b/mindspore/ccsrc/transform/graph_ir/op_declare/logging_ops_declare.cc
index 014ed19129e..a6f59dbbd4b 100644
--- a/mindspore/ccsrc/transform/graph_ir/op_declare/logging_ops_declare.cc
+++ b/mindspore/ccsrc/transform/graph_ir/op_declare/logging_ops_declare.cc
@@ -18,11 +18,6 @@
 #include
 
 namespace mindspore::transform {
-// PrintV2
-INPUT_MAP(PrintV2) = {{1, INPUT_DESC(x)}};
-ATTR_MAP(PrintV2) = {{"output_stream", ATTR_DESC(output_stream, AnyTraits<std::string>())}};
-REG_ADPT_DESC(PrintV2, kNamePrint, ADPT_DESC(PrintV2))
-
 INPUT_MAP(Assert) = {{1, INPUT_DESC(input_condition)}};
 DYN_INPUT_MAP(Assert) = {{2, DYN_INPUT_DESC(input_data)}};
 ATTR_MAP(Assert) = {{"summarize", ATTR_DESC(summarize, AnyTraits<int64_t>())}};
diff --git a/tests/st/fallback/test_graph_fallback_print.py b/tests/st/fallback/test_graph_fallback_print.py
index 4b6240a6bfe..00dbef64f2e 100644
--- a/tests/st/fallback/test_graph_fallback_print.py
+++ b/tests/st/fallback/test_graph_fallback_print.py
@@ -621,7 +621,7 @@ def test_print_in_lambda_func_graph_with_isolate_node():
         time.sleep(0.1)
 
     assert (output == data).all()
-    patterns = {"Tensor(shape=[2 2], dtype=Int64, value=\n[[0 2]\n [4 6]])"}
+    patterns = {"Tensor(shape=[2, 2], dtype=Int64, value=\n[[0 2]\n [4 6]])"}
     check_output(cap.output, patterns)
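The single test-expectation change above follows from the backend switch: the removed StringFormat path emitted space-joined shapes ("[2 2]", per the sketch after the deleted pass), while output drained from the "_npu_log" outfeed channel arrives with comma-separated shapes, which is what the updated pattern now asserts. The full expected fragment, for reference:

    Tensor(shape=[2, 2], dtype=Int64, value=
    [[0 2]
     [4 6]])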