diff --git a/cmake/package_lite.cmake b/cmake/package_lite.cmake
index 3c257d6443f..c6a75739d07 100644
--- a/cmake/package_lite.cmake
+++ b/cmake/package_lite.cmake
@@ -464,7 +464,7 @@ else()
                 COMPONENT ${RUNTIME_COMPONENT_NAME})
         if(MSLITE_ENABLE_ACL)
-            set(LITE_ACL_DIR ${TOP_DIR}/mindspore/lite/build/tools/converter/acl)
+            set(LITE_ACL_DIR ${TOP_DIR}/mindspore/lite/build/tools/converter/adapter/acl)
             install(FILES ${LITE_ACL_DIR}/mindspore_shared_lib/libmindspore_shared_lib.so
                     DESTINATION ${CONVERTER_ROOT_DIR}/lib COMPONENT ${RUNTIME_COMPONENT_NAME})
         endif()
diff --git a/mindspore/ccsrc/cxx_api/CMakeLists.txt b/mindspore/ccsrc/cxx_api/CMakeLists.txt
index 7874967dc23..9c0deb7e658 100644
--- a/mindspore/ccsrc/cxx_api/CMakeLists.txt
+++ b/mindspore/ccsrc/cxx_api/CMakeLists.txt
@@ -69,14 +69,17 @@ set(MSLIB_SRC ${CMAKE_CURRENT_SOURCE_DIR}/types.cc
 if(BUILD_LITE)
     list(APPEND MSLIB_SRC "${CMAKE_CURRENT_SOURCE_DIR}/../../../mindspore/ccsrc/utils/config_manager.cc")
+    file(GLOB_RECURSE ACL_REMOVE_SRC ${CMAKE_CURRENT_SOURCE_DIR}
+        "model/acl/acl_vm/*.cc"
+        )
     list(REMOVE_ITEM MSLIB_SRC "${CMAKE_CURRENT_SOURCE_DIR}/akg_kernel_register.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/model/acl/acl_model_multi.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/model/acl/acl_model.cc"
-        "${CMAKE_CURRENT_SOURCE_DIR}/model/acl/acl_vm/*.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/serialization.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/types.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/model/model.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/model/model_impl.cc"
+        ${ACL_REMOVE_SRC}
         $)
diff --git a/mindspore/ccsrc/transform/graph_ir/op_declare/nn_pooling_ops_declare.cc b/mindspore/ccsrc/transform/graph_ir/op_declare/nn_pooling_ops_declare.cc
index de979deaae5..8dbc793e00b 100644
--- a/mindspore/ccsrc/transform/graph_ir/op_declare/nn_pooling_ops_declare.cc
+++ b/mindspore/ccsrc/transform/graph_ir/op_declare/nn_pooling_ops_declare.cc
@@ -167,8 +167,8 @@ REG_ADPT_DESC(GlobalAveragePool, kNameGlobalAvgPool, ADPT_DESC(GlobalAveragePool
 // Upsample
 INPUT_MAP(Upsample) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(Upsample) = {{"scale", ATTR_DESC(scale, AnyTraits<float>())},
-                      {"h", ATTR_DESC(stride_h, AnyTraits<int64_t>())},
-                      {"w", ATTR_DESC(stride_w, AnyTraits<int64_t>())}};
+                      {"stride_h", ATTR_DESC(stride_h, AnyTraits<int64_t>())},
+                      {"stride_w", ATTR_DESC(stride_w, AnyTraits<int64_t>())}};
 OUTPUT_MAP(Upsample) = {{0, OUTPUT_DESC(y)}};
 REG_ADPT_DESC(Upsample, kNameUpsample, ADPT_DESC(Upsample))
 } // namespace mindspore::transform
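
A note on the Upsample ATTR_MAP fix above: the adapter resolves each ATTR_DESC entry by looking up its string key as an attribute name on the source primitive, so the keys have to match the names the converter-side mapper actually writes ("stride_h"/"stride_w", set in upsample_mapper.cc further down); with the old "h"/"w" keys the stride attributes were never found. A minimal standalone sketch of that lookup contract, using a plain std::map as a stand-in for the primitive's attribute store (nothing here is MindSpore API):

// Stand-in for the primitive's attribute store: the adapter can only forward
// an attribute whose ATTR_MAP key equals the name the mapper stored it under.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

int main() {
  std::map<std::string, int64_t> prim_attrs;  // attr name -> value
  // The lite mapper (upsample_mapper.cc below) emits these names:
  prim_attrs["stride_h"] = 2;
  prim_attrs["stride_w"] = 2;
  // The old keys miss, so the strides were silently dropped:
  std::cout << "lookup 'h':        " << prim_attrs.count("h") << '\n';         // prints 0
  // The renamed keys hit, so the values reach the GE Upsample operator:
  std::cout << "lookup 'stride_h': " << prim_attrs.count("stride_h") << '\n';  // prints 1
  return 0;
}
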
"outputs_names"; @@ -45,6 +45,8 @@ constexpr auto kNCHWFormat = "NCHW"; constexpr auto kToNHWCFormatPass = "ToNHWCFormat"; constexpr auto kToNCHWFormatPass = "ToNCHWFormat"; constexpr auto kInferShapePass = "InferShapePass"; +constexpr auto kConstFoldPass = "ConstFoldPass"; +constexpr auto kRemoveRedundantOpPass = "RemoveRedundantOpPass"; constexpr auto kDelRedundantTranspose = "DeleteRedundantTranspose"; constexpr size_t kDependInputNum = 3; constexpr size_t kDependFirstInputIdx = 1; @@ -54,7 +56,6 @@ constexpr size_t kTupleGetItemFirstInputIdx = 1; AclPassImpl::AclPassImpl(const converter::Flags &config) : device_type_(config.device), fmk_type_(config.fmk), - graph_input_format_(std::move(config.graphInputFormatStr)), acl_model_option_cfg_(std::move(config.aclModelOptionCfgParam)) {} ParameterPtr AclPassImpl::CreateOmParameter(const FuncGraphPtr &func_graph, const Buffer &om_data) { @@ -153,14 +154,26 @@ STATUS AclPassImpl::DeparseGraph(const FuncGraphPtr &func_graph, const FuncGraph return lite::RET_OK; } +STATUS AclPassImpl::CommonPass(const FuncGraphPtr &func_graph) { + if (!lite::RunOptimizerPass(func_graph, {kRemoveRedundantOpPass})) { + MS_LOG(ERROR) << "Remove redundant op pass failed."; + return lite::RET_ERROR; + } + if (!lite::RunOptimizerPass(func_graph, {kConstFoldPass})) { + MS_LOG(ERROR) << "Const fold pass failed."; + return lite::RET_ERROR; + } + return lite::RET_OK; +} + STATUS AclPassImpl::PreProcGraph(const FuncGraphPtr &func_graph) { if (fmk_type_ == converter::kFmkTypeMs) { - MS_LOG(INFO) << "MindIr no need to pre proc graph."; + MS_LOG(DEBUG) << "MindIr no need to change format."; return lite::RET_OK; } // The format of nodes (cnode, parameter, val) must be nchw due to interface of convert om if (!lite::RunOptimizerPass(func_graph, {kInferShapePass, kToNCHWFormatPass, kDelRedundantTranspose})) { - MS_LOG(ERROR) << "To nchw format success."; + MS_LOG(ERROR) << "To nchw format failed."; return lite::RET_ERROR; } MS_LOG(DEBUG) << "Pre proc graph success."; @@ -168,10 +181,6 @@ STATUS AclPassImpl::PreProcGraph(const FuncGraphPtr &func_graph) { } STATUS AclPassImpl::PostProcGraph(const FuncGraphPtr &func_graph) { - if (graph_input_format_ == kNCHWFormat) { - MS_LOG(INFO) << "No need to transpose format to nhwc."; - return lite::RET_OK; - } if (!lite::RunOptimizerPass(func_graph, {kToNHWCFormatPass})) { MS_LOG(ERROR) << "To NHWC Format failed."; return lite::RET_ERROR; diff --git a/mindspore/lite/tools/converter/adapter/acl/acl_pass_impl.h b/mindspore/lite/tools/converter/adapter/acl/acl_pass_impl.h index 8fec468caa7..e57eff7bbbd 100644 --- a/mindspore/lite/tools/converter/adapter/acl/acl_pass_impl.h +++ b/mindspore/lite/tools/converter/adapter/acl/acl_pass_impl.h @@ -44,6 +44,7 @@ class AclPassImpl { private: bool IsDeviceAscend(); + STATUS CommonPass(const FuncGraphPtr &func_graph); STATUS PreProcGraph(const FuncGraphPtr &func_graph); STATUS PostProcGraph(const FuncGraphPtr &func_graph); STATUS DeparseGraph(const FuncGraphPtr &func_graph, const FuncGraphManagerPtr &manager); @@ -66,7 +67,6 @@ class AclPassImpl { std::string device_type_; FmkType fmk_type_; - std::string graph_input_format_; lite::acl::AclModelOptionCfg acl_model_option_cfg_; ParameterPtr om_parameter_ = nullptr; CNodePtr custom_node_ = nullptr; diff --git a/mindspore/lite/tools/converter/adapter/acl/mapper/upsample_mapper.cc b/mindspore/lite/tools/converter/adapter/acl/mapper/upsample_mapper.cc index 236bd758e9d..635bc7e53e5 100644 --- 
diff --git a/mindspore/lite/tools/converter/adapter/acl/mapper/upsample_mapper.cc b/mindspore/lite/tools/converter/adapter/acl/mapper/upsample_mapper.cc
index 236bd758e9d..635bc7e53e5 100644
--- a/mindspore/lite/tools/converter/adapter/acl/mapper/upsample_mapper.cc
+++ b/mindspore/lite/tools/converter/adapter/acl/mapper/upsample_mapper.cc
@@ -69,7 +69,12 @@ STATUS UpsampleMapper::AttrAdjust(const PrimitivePtr &src_prim, const ValueNodeP
   MS_LOG(DEBUG) << "The scale value: " << scale[1];
   auto dst_prim = std::make_shared<acl::Upsample>();
   CHECK_NULL_RETURN(dst_prim);
-  dst_prim->AddAttr("scale", MakeValue(scale[1]));
+  float attr_scale = 1;
+  dst_prim->AddAttr("scale", MakeValue(attr_scale));
+  int64_t stride_h = static_cast<int64_t>(scale[1]);
+  int64_t stride_w = stride_h;
+  dst_prim->AddAttr("stride_h", MakeValue(stride_h));
+  dst_prim->AddAttr("stride_w", MakeValue(stride_w));
   val_node->set_value(dst_prim);
   return RET_OK;
 }
diff --git a/mindspore/lite/tools/converter/anf_transform.cc b/mindspore/lite/tools/converter/anf_transform.cc
index b4013b16c0f..4033eb94ec9 100644
--- a/mindspore/lite/tools/converter/anf_transform.cc
+++ b/mindspore/lite/tools/converter/anf_transform.cc
@@ -511,6 +511,7 @@ bool AnfTransform::StoreBuiltinPass(const converter::Flags *config) {
   auto is_train = config->trainModel;
   std::unordered_map<std::string, opt::PassPtr> passes = {
     {"DumpGraph", std::make_shared<opt::DumpGraph>(config)},
+    {"RemoveRedundantOpPass", std::make_shared<opt::RemoveRedundantOpPass>(config->trainModel)},
    {"ToNCHWFormat", std::make_shared<opt::ToNCHWFormat>(fmk, is_train)},
     {"ToNHWCFormat", std::make_shared<opt::ToNHWCFormat>(fmk, is_train)},
     {"ConstFoldPass", std::make_shared<opt::ConstFoldPass>(fmk, is_train)},
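
Tying the mapper change back to the ATTR_MAP rename in nn_pooling_ops_declare.cc: UpsampleMapper::AttrAdjust now pins "scale" to 1 and carries the resampling factor in integer "stride_h"/"stride_w" attributes, which the renamed ATTR_MAP keys then pick up. A standalone sketch of just that conversion, assuming, as the diff does, that scale[1] holds the spatial factor and that H and W use the same factor; UpsampleAttrs is a stand-in struct, not a MindSpore type:

// scale -> stride conversion as done in UpsampleMapper::AttrAdjust (sketch).
#include <cstdint>
#include <iostream>
#include <vector>

struct UpsampleAttrs {
  float scale = 1.0f;   // pinned to 1; the factor travels in the strides
  int64_t stride_h = 1;
  int64_t stride_w = 1;
};

UpsampleAttrs AdjustUpsampleAttrs(const std::vector<float> &scale) {
  UpsampleAttrs attrs;
  attrs.scale = 1.0f;
  attrs.stride_h = static_cast<int64_t>(scale[1]);  // scale[1]: spatial factor
  attrs.stride_w = attrs.stride_h;                  // H and W factors assumed equal
  return attrs;
}

int main() {
  const auto attrs = AdjustUpsampleAttrs({1.0f, 2.0f, 2.0f, 2.0f});
  std::cout << "scale=" << attrs.scale << " stride_h=" << attrs.stride_h
            << " stride_w=" << attrs.stride_w << '\n';  // scale=1 stride_h=2 stride_w=2
  return 0;
}

Note the truncation in static_cast<int64_t>: non-integer scale factors are floored toward zero, so this mapping only round-trips cleanly for integral scales.
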