forked from mindspore-Ecosystem/mindspore
commit 8b9e91f294 (parent f81ca1f020)

issue fixed
@@ -464,7 +464,7 @@ else()
             COMPONENT ${RUNTIME_COMPONENT_NAME})

     if(MSLITE_ENABLE_ACL)
-        set(LITE_ACL_DIR ${TOP_DIR}/mindspore/lite/build/tools/converter/acl)
+        set(LITE_ACL_DIR ${TOP_DIR}/mindspore/lite/build/tools/converter/adapter/acl)
         install(FILES ${LITE_ACL_DIR}/mindspore_shared_lib/libmindspore_shared_lib.so
                 DESTINATION ${CONVERTER_ROOT_DIR}/lib COMPONENT ${RUNTIME_COMPONENT_NAME})
     endif()
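Note: the install rule looks for libmindspore_shared_lib.so under LITE_ACL_DIR, so the path has to match where the build actually emits the ACL converter output; this hunk updates it to the relocated adapter/acl build directory.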
@@ -69,14 +69,17 @@ set(MSLIB_SRC ${CMAKE_CURRENT_SOURCE_DIR}/types.cc

 if(BUILD_LITE)
     list(APPEND MSLIB_SRC "${CMAKE_CURRENT_SOURCE_DIR}/../../../mindspore/ccsrc/utils/config_manager.cc")
+    file(GLOB_RECURSE ACL_REMOVE_SRC ${CMAKE_CURRENT_SOURCE_DIR}
+        "model/acl/acl_vm/*.cc"
+    )
     list(REMOVE_ITEM MSLIB_SRC "${CMAKE_CURRENT_SOURCE_DIR}/akg_kernel_register.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/model/acl/acl_model_multi.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/model/acl/acl_model.cc"
-        "${CMAKE_CURRENT_SOURCE_DIR}/model/acl/acl_vm/*.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/serialization.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/types.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/model/model.cc"
         "${CMAKE_CURRENT_SOURCE_DIR}/model/model_impl.cc"
+        ${ACL_REMOVE_SRC}
         $<TARGET_OBJECTS:_mindspore_vm_obj>)
 endif()
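Note: list(REMOVE_ITEM) compares entries as literal strings, so a glob pattern such as "model/acl/acl_vm/*.cc" never matches anything in MSLIB_SRC. Expanding the pattern first with file(GLOB_RECURSE) into ACL_REMOVE_SRC and removing the expanded list does what the wildcard entry intended.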
@@ -167,8 +167,8 @@ REG_ADPT_DESC(GlobalAveragePool, kNameGlobalAvgPool, ADPT_DESC(GlobalAveragePool
 // Upsample
 INPUT_MAP(Upsample) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(Upsample) = {{"scale", ATTR_DESC(scale, AnyTraits<float>())},
-                      {"h", ATTR_DESC(stride_h, AnyTraits<int64_t>())},
-                      {"w", ATTR_DESC(stride_w, AnyTraits<int64_t>())}};
+                      {"stride_h", ATTR_DESC(stride_h, AnyTraits<int64_t>())},
+                      {"stride_w", ATTR_DESC(stride_w, AnyTraits<int64_t>())}};
 OUTPUT_MAP(Upsample) = {{0, OUTPUT_DESC(y)}};
 REG_ADPT_DESC(Upsample, kNameUpsample, ADPT_DESC(Upsample))
 }  // namespace mindspore::transform
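Note: ATTR_MAP keys are looked up by attribute name on the source primitive, so they must match the names the mapper actually sets. UpsampleMapper::AttrAdjust now attaches "stride_h"/"stride_w" (see the mapper hunk below), so the old "h"/"w" keys would never be found.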
@@ -16,6 +16,7 @@ file(GLOB ACL_SRC
     )

 set(ENABLE_ACL on)
+set(MODE_ASCEND_ACL off)

 add_subdirectory(${TOP_DIR}/mindspore/ccsrc/transform/graph_ir _mindspore_transform_graph_ir_obj)
 add_subdirectory(${TOP_DIR}/mindspore/ccsrc/cxx_api mindspore_shared_lib)
@@ -35,7 +35,7 @@

 namespace mindspore {
 namespace opt {
-constexpr std::set kDevice = {"Ascend310", "Ascend710"};
+static const std::set<std::string> kDevice = {"Ascend310", "Ascend710"};
 namespace {
 constexpr auto kMakeTuple = "MakeTuple";
 constexpr auto kOutputNames = "outputs_names";
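Note: the old declaration cannot compile. std::set has no constexpr constructor, so it is not usable as a constexpr variable, and the missing template argument would otherwise be deduced from the braced initializer as const char*. A static const std::set<std::string> gives a single lazily-constructed, string-valued set. A minimal standalone sketch (IsSupportedDevice is a hypothetical helper, not part of this patch):

    #include <set>
    #include <string>

    // constexpr std::set<...> does not compile: std::set is not a literal
    // type and has no constexpr constructor. static const yields one shared
    // instance, initialized on first use of the translation unit.
    static const std::set<std::string> kDevice = {"Ascend310", "Ascend710"};

    // Hypothetical helper showing the intended membership test.
    bool IsSupportedDevice(const std::string &device) { return kDevice.count(device) > 0; }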
@@ -45,6 +45,8 @@ constexpr auto kNCHWFormat = "NCHW";
 constexpr auto kToNHWCFormatPass = "ToNHWCFormat";
 constexpr auto kToNCHWFormatPass = "ToNCHWFormat";
 constexpr auto kInferShapePass = "InferShapePass";
+constexpr auto kConstFoldPass = "ConstFoldPass";
+constexpr auto kRemoveRedundantOpPass = "RemoveRedundantOpPass";
 constexpr auto kDelRedundantTranspose = "DeleteRedundantTranspose";
 constexpr size_t kDependInputNum = 3;
 constexpr size_t kDependFirstInputIdx = 1;
@@ -54,7 +56,6 @@ constexpr size_t kTupleGetItemFirstInputIdx = 1;
 AclPassImpl::AclPassImpl(const converter::Flags &config)
     : device_type_(config.device),
       fmk_type_(config.fmk),
       graph_input_format_(std::move(config.graphInputFormatStr)),
       acl_model_option_cfg_(std::move(config.aclModelOptionCfgParam)) {}

 ParameterPtr AclPassImpl::CreateOmParameter(const FuncGraphPtr &func_graph, const Buffer &om_data) {
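Note: config is a const reference, so std::move(config.graphInputFormatStr) produces a const rvalue, which binds to the copy constructor; both members are still copied, not moved. The std::move calls are harmless here but have no effect.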
@@ -153,14 +154,26 @@ STATUS AclPassImpl::DeparseGraph(const FuncGraphPtr &func_graph, const FuncGraph
   return lite::RET_OK;
 }

+STATUS AclPassImpl::CommonPass(const FuncGraphPtr &func_graph) {
+  if (!lite::RunOptimizerPass(func_graph, {kRemoveRedundantOpPass})) {
+    MS_LOG(ERROR) << "Remove redundant op pass failed.";
+    return lite::RET_ERROR;
+  }
+  if (!lite::RunOptimizerPass(func_graph, {kConstFoldPass})) {
+    MS_LOG(ERROR) << "Const fold pass failed.";
+    return lite::RET_ERROR;
+  }
+  return lite::RET_OK;
+}
+
 STATUS AclPassImpl::PreProcGraph(const FuncGraphPtr &func_graph) {
   if (fmk_type_ == converter::kFmkTypeMs) {
-    MS_LOG(INFO) << "MindIr no need to pre proc graph.";
+    MS_LOG(DEBUG) << "MindIr no need to change format.";
     return lite::RET_OK;
   }
   // The format of nodes (cnode, parameter, val) must be nchw due to interface of convert om
   if (!lite::RunOptimizerPass(func_graph, {kInferShapePass, kToNCHWFormatPass, kDelRedundantTranspose})) {
-    MS_LOG(ERROR) << "To nchw format success.";
+    MS_LOG(ERROR) << "To nchw format failed.";
     return lite::RET_ERROR;
   }
   MS_LOG(DEBUG) << "Pre proc graph success.";
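Note: the new CommonPass factors the two optimizer passes shared by the ACL flow into one fail-fast helper, and the hunk also fixes an inverted log message ("To nchw format success." was being logged on the failure path). A standalone sketch of the same fail-fast pattern, with generic names rather than the repository's types:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <utility>
    #include <vector>

    // Run named stages in order and stop at the first failure, mirroring how
    // CommonPass bails out instead of continuing to optimize a broken graph.
    bool RunPasses(const std::vector<std::pair<std::string, std::function<bool()>>> &passes) {
      for (const auto &[name, run] : passes) {
        if (!run()) {
          std::cerr << name << " failed.\n";
          return false;
        }
      }
      return true;
    }

    int main() {
      bool ok = RunPasses({{"RemoveRedundantOpPass", [] { return true; }},
                           {"ConstFoldPass", [] { return true; }}});
      return ok ? 0 : 1;
    }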
@@ -168,10 +181,6 @@ STATUS AclPassImpl::PreProcGraph(const FuncGraphPtr &func_graph) {
 }

 STATUS AclPassImpl::PostProcGraph(const FuncGraphPtr &func_graph) {
-  if (graph_input_format_ == kNCHWFormat) {
-    MS_LOG(INFO) << "No need to transpose format to nhwc.";
-    return lite::RET_OK;
-  }
   if (!lite::RunOptimizerPass(func_graph, {kToNHWCFormatPass})) {
     MS_LOG(ERROR) << "To NHWC Format failed.";
     return lite::RET_ERROR;
@@ -44,6 +44,7 @@ class AclPassImpl {

 private:
  bool IsDeviceAscend();
+ STATUS CommonPass(const FuncGraphPtr &func_graph);
  STATUS PreProcGraph(const FuncGraphPtr &func_graph);
  STATUS PostProcGraph(const FuncGraphPtr &func_graph);
  STATUS DeparseGraph(const FuncGraphPtr &func_graph, const FuncGraphManagerPtr &manager);
@@ -66,7 +67,6 @@ class AclPassImpl {

  std::string device_type_;
  FmkType fmk_type_;
  std::string graph_input_format_;
  lite::acl::AclModelOptionCfg acl_model_option_cfg_;
  ParameterPtr om_parameter_ = nullptr;
  CNodePtr custom_node_ = nullptr;
@@ -69,7 +69,12 @@ STATUS UpsampleMapper::AttrAdjust(const PrimitivePtr &src_prim, const ValueNodeP
   MS_LOG(DEBUG) << "The scale value: " << scale[1];
   auto dst_prim = std::make_shared<acl::Upsample>();
   CHECK_NULL_RETURN(dst_prim);
-  dst_prim->AddAttr("scale", MakeValue(scale[1]));
+  float attr_scale = 1;
+  dst_prim->AddAttr("scale", MakeValue(attr_scale));
+  int64_t stride_h = static_cast<int64_t>(scale[1]);
+  int64_t stride_w = stride_h;
+  dst_prim->AddAttr("stride_h", MakeValue(stride_h));
+  dst_prim->AddAttr("stride_w", MakeValue(stride_w));
   val_node->set_value(dst_prim);
   return RET_OK;
 }
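Note: this is the other half of the ATTR_MAP rename above: the ACL Upsample operator scales by the integer stride_h/stride_w attributes, so the float scale factor is cast to int64_t strides and the "scale" attribute is pinned to 1. The static_cast truncates, so any fractional part of the scale factor is dropped.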
@@ -511,6 +511,7 @@ bool AnfTransform::StoreBuiltinPass(const converter::Flags *config) {
   auto is_train = config->trainModel;
   std::unordered_map<std::string, opt::PassPtr> passes = {
     {"DumpGraph", std::make_shared<opt::DumpGraph>(config)},
+    {"RemoveRedundantOpPass", std::make_shared<opt::RemoveRedundantOpPass>(config->trainModel)},
     {"ToNCHWFormat", std::make_shared<opt::ToNCHWFormat>(fmk, is_train)},
     {"ToNHWCFormat", std::make_shared<opt::ToNHWCFormat>(fmk, is_train)},
    {"ConstFoldPass", std::make_shared<opt::ConstFoldPass>(fmk, is_train)},
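Note: passes invoked by name through lite::RunOptimizerPass are presumably looked up in this name-to-pass map, so RemoveRedundantOpPass has to be registered here before CommonPass can request it. A standalone sketch of the name-keyed registry pattern (hypothetical types, not the repository's opt::Pass):

    #include <memory>
    #include <string>
    #include <unordered_map>

    // A pass is fetched by string key at run time, so forgetting to register
    // it makes the lookup, and hence the whole pipeline, fail.
    struct Pass {
      virtual ~Pass() = default;
      virtual bool Run() = 0;
    };
    struct RemoveRedundantOpPass : Pass {
      bool Run() override { return true; }  // the real pass rewrites the graph
    };

    int main() {
      std::unordered_map<std::string, std::shared_ptr<Pass>> passes = {
          {"RemoveRedundantOpPass", std::make_shared<RemoveRedundantOpPass>()}};
      auto it = passes.find("RemoveRedundantOpPass");
      return (it != passes.end() && it->second->Run()) ? 0 : 1;
    }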