forked from mindspore-Ecosystem/mindspore
upgrade_ascend_0626_mindspore
This commit is contained in:
parent da7df61924
commit 63784e49f5
@@ -25,15 +25,15 @@ string(REPLACE " -Wall" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
 string(REPLACE " -Werror" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")

 if(ENABLE_GITEE)
-set(REQ_URL "https://gitee.com/mirrors/protobuf_source/repository/archive/v3.8.0.tar.gz")
-set(MD5 "eba86ae9f07ba5cfbaf8af3bc4e84236")
+set(REQ_URL "https://gitee.com/mirrors/protobuf_source/repository/archive/v3.13.0.tar.gz")
+set(MD5 "f4489cb88922ad9c58cb3308d59cee5")
 else()
-set(REQ_URL "https://github.com/protocolbuffers/protobuf/archive/v3.8.0.tar.gz")
-set(MD5 "3d9e32700639618a4d2d342c99d4507a")
+set(REQ_URL "https://github.com/protocolbuffers/protobuf/archive/v3.13.0.tar.gz")
+set(MD5 "1a6274bc4a65b55a6fa70e264d796490")
 endif()

 mindspore_add_pkg(protobuf
-VER 3.8.0
+VER 3.13.0
 LIBS protobuf
 EXE protoc
 URL ${REQ_URL}
@@ -1 +1 @@
-Subproject commit 2d5ad3a72e5fefa3857eb9443083cd1117fd49a7
+Subproject commit b4b7c454e6041e864cdc2cc0fb5e61d3b5223c30
@@ -293,6 +293,7 @@ if(MODE_ASCEND_ALL)
 find_library(ACL ascendcl ${ASCEND_RUNTIME_PATH} ${ASCEND_TOOLKIT_RUNTIME_PATH})
 find_library(PLATFORM platform ${ASCEND_RUNTIME_PATH} ${ASCEND_TOOLKIT_RUNTIME_PATH})
 find_library(OPTILING optiling ${ASCEND_OPP_PATH} ${ASCEND_TOOLKIT_OPP_PATH})
+find_library(OPT_FEATURE opt_feature ${ASCEND_RUNTIME_PATH} ${ASCEND_TOOLKIT_RUNTIME_PATH})

 add_library(ms_profile SHARED
 ${CMAKE_CURRENT_SOURCE_DIR}/runtime/device/ascend/profiling/profiling_callback_register.cc)
@@ -301,7 +302,7 @@ if(MODE_ASCEND_ALL)
 target_link_libraries(ms_profile -Wl,--start-group -Wl,--whole-archive ${PROFILING} -Wl,--no-whole-archive
 mindspore::protobuf -Wl,--end-group)
 target_link_libraries(mindspore ${RUNTIME_LIB} ${TSDCLIENT} ${DATATRANSFER} -Wl,--no-as-needed ${OPTILING}
-${PLATFORM} ${ACL})
+${PLATFORM} ${ACL} ${OPT_FEATURE})
 target_link_libraries(mindspore -Wl,--start-group proto_input mindspore::protobuf -Wl,--end-group)
 elseif(CMAKE_SYSTEM_NAME MATCHES "Windows")
 target_link_libraries(mindspore -Wl,--start-group proto_input mindspore::protobuf mindspore::sentencepiece
@@ -46,6 +46,9 @@ void BatchMatmulFusedMulAddFusionPass::MatchBatchMatmulFusedMulAdd(const CNodePt
 void BatchMatmulFusedMulAddFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
 FusedNodeRecord *candidate_fusion) {
 MS_EXCEPTION_IF_NULL(candidate_fusion);
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::BatchMatmulFusedMulAddFusionPass)) {
+return;
+}
 std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
 for (auto &node : node_list) {
 if (!AnfAlgo::IsRealCNodeKernel(node) || fusion_id_allocator->HasFusionIdAttr(node) ||
@@ -69,6 +69,9 @@ void BnupdateEltwiseEltwiseFusionPass::MatchBnupdateAddRelu(const CNodePtr &cnod

 void BnupdateEltwiseEltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
 FusedNodeRecord *candidate_fusion) {
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::BnupdateEltwiseEltwiseFusionPass)) {
+return;
+}
 MS_EXCEPTION_IF_NULL(candidate_fusion);
 std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
 for (auto &node : node_list) {
@@ -48,6 +48,9 @@ void BnupdateEltwiseFusionPass::MatchBnupdateDoubleOutputEltwise(const CNodePtr

 void BnupdateEltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
 FusedNodeRecord *candidate_fusion) {
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::BnupdateEltwiseFusionPass)) {
+return;
+}
 MS_EXCEPTION_IF_NULL(candidate_fusion);
 std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
 for (auto &node : node_list) {
@@ -69,6 +69,9 @@ void Conv2DBackpropEltwiseEltwiseFusionPass::MatchConv2DBackpropInputEltwiseEltw

 void Conv2DBackpropEltwiseEltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
 FusedNodeRecord *candidate_fusion) {
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::Conv2DBackpropEltwiseFusionPass)) {
+return;
+}
 MS_EXCEPTION_IF_NULL(candidate_fusion);
 std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
 for (auto &node : node_list) {
@@ -47,6 +47,9 @@ void Conv2DBackpropEltwiseFusionPass::MatchConv2DBackpropInputEltwise(const CNod

 void Conv2DBackpropEltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
 FusedNodeRecord *candidate_fusion) {
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::Conv2DBackpropEltwiseFusionPass)) {
+return;
+}
 MS_EXCEPTION_IF_NULL(candidate_fusion);
 std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
 for (auto &node : node_list) {
@@ -45,6 +45,9 @@ void ConvBnReduceFusionPass::MatchConvBnreduce(const CNodePtr &cnode, const sess

 void ConvBnReduceFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
 FusedNodeRecord *candidate_fusion) {
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::ConvBnReduceFusionPass)) {
+return;
+}
 MS_EXCEPTION_IF_NULL(candidate_fusion);
 std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
 for (auto &node : node_list) {
@@ -55,6 +55,9 @@ void EltwiseFusionPass::MatchEltwise(const CNodePtr &cnode, const session::Kerne
 void EltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
 FusedNodeRecord *candidate_fusion) {
 MS_EXCEPTION_IF_NULL(candidate_fusion);
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::EltwiseFusionPass)) {
+return;
+}
 std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
 std::reverse(node_list.begin(), node_list.end());
 for (auto &node : node_list) {
@@ -26,6 +26,7 @@
 #include "runtime/device/kernel_info.h"
 #include "backend/kernel_compiler/kernel.h"
 #include "backend/session/kernel_graph.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
@@ -44,6 +44,11 @@ void MatmulEltwiseFusionPass::MatchMatmulEltwise(const CNodePtr &cnode, const An
 void MatmulEltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
 FusedNodeRecord *candidate_fusion) {
 MS_EXCEPTION_IF_NULL(candidate_fusion);
+
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::MatmulEltwiseFusionPass)) {
+return;
+}
+
 std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
 for (auto &node : node_list) {
 if (!AnfAlgo::IsRealCNodeKernel(node) || fusion_id_allocator->HasFusionIdAttr(node) ||
@@ -23,6 +23,7 @@
 #include "base/core_ops.h"
 #include "utils/ms_context.h"
 #include "backend/optimizer/common/fusion_id_allocator.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
@@ -62,6 +63,9 @@ void MultiOutputFusionPass::MatchMultiOutputEltwise(const CNodePtr &cnode, const
 void MultiOutputFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
 FusedNodeRecord *candidate_fusion) {
 MS_EXCEPTION_IF_NULL(candidate_fusion);
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::MultiOutputFusionPass)) {
+return;
+}
 std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
 std::reverse(node_list.begin(), node_list.end());
 for (auto &node : node_list) {
@@ -21,6 +21,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "ir/primitive.h"
 #include "utils/utils.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
@@ -44,6 +45,11 @@ const AnfNodePtr ClipByNormNoDivSquareSumFusion::Process(const FuncGraphPtr &gra
 MS_EXCEPTION_IF_NULL(graph);
 MS_EXCEPTION_IF_NULL(node);
 MS_EXCEPTION_IF_NULL(equiv);
+
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::ClipByNormNoDivSquareSumFusion)) {
+return node;
+}
+
 BaseRef &input_gnode = (*equiv)[input_];
 BaseRef &constant_select_gnode = (*equiv)[constant_select_];
 BaseRef &constant_greater_gnode = (*equiv)[constant_greater_];
@@ -22,6 +22,7 @@
 #include "utils/utils.h"
 #include "abstract/abstract_value.h"
 #include "backend/optimizer/common/helper.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
@@ -105,6 +106,11 @@ const BaseRef DereluFusion::DefinePattern() const {
 const AnfNodePtr DereluFusion::Process(const FuncGraphPtr &graph, const AnfNodePtr &node, const EquivPtr &) const {
 MS_EXCEPTION_IF_NULL(graph);
 MS_EXCEPTION_IF_NULL(node);
+
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::DereluFusion)) {
+return node;
+}
+
 auto relu_grad = node->cast<CNodePtr>();
 MS_EXCEPTION_IF_NULL(relu_grad);
 auto relu = GetRelu(relu_grad);
@@ -20,6 +20,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "utils/utils.h"
 #include "utils/trace_base.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
@@ -193,6 +194,11 @@ const AnfNodePtr FusedBatchNormFusion::Process(const FuncGraphPtr &func_graph, c
 MS_EXCEPTION_IF_NULL(func_graph);
 MS_EXCEPTION_IF_NULL(equiv);
 MS_EXCEPTION_IF_NULL(node);
+
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::FusedBatchNormFusion)) {
+return node;
+}
+
 AnfNodePtr bn_training_reduce = CreateBNTrainingReduce(func_graph, node, equiv);
 std::vector<AnfNodePtr> bn_training_reduce_outputs;
 CreateMultipleOutputsOfAnfNode(func_graph, bn_training_reduce, kBNTrainingReduceOutputNum,
@@ -19,6 +19,8 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "utils/utils.h"
 #include "utils/trace_base.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
 const BaseRef MatmulBiasaddFusion::DefinePattern() const {
@@ -31,6 +33,9 @@ const AnfNodePtr MatmulBiasaddFusion::Process(const FuncGraphPtr &graph, const A
 const EquivPtr &equiv) const {
 MS_EXCEPTION_IF_NULL(node);
 MS_EXCEPTION_IF_NULL(graph);
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::MatmulBiasaddFusion)) {
+return node;
+}

 auto matmul = GetAnfNodeByVar(equiv, matmul_var_);
 if (matmul == nullptr || !matmul->isa<CNode>()) {
@@ -19,6 +19,7 @@
 #include <string>
 #include "backend/optimizer/common/helper.h"
 #include "backend/session/anf_runtime_algorithm.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
@@ -52,6 +53,11 @@ const AnfNodePtr MomentumLossscaleFusion::Process(const FuncGraphPtr &func_graph
 const EquivPtr &) const {
 MS_EXCEPTION_IF_NULL(func_graph);
 MS_EXCEPTION_IF_NULL(node);
+
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::MomentumLossscaleFusion)) {
+return node;
+}
+
 auto cnode = node->cast<CNodePtr>();
 MS_EXCEPTION_IF_NULL(cnode);
 CheckCNodeInputSize(cnode, kApplyMomentumInputTensorNum);
@@ -21,6 +21,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "frontend/optimizer/opt.h"
 #include "backend/optimizer/common/helper.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
@@ -70,6 +71,11 @@ const AnfNodePtr MulAddFusion::Process(const FuncGraphPtr &graph, const AnfNodeP
 if (add == nullptr || AnfAlgo::GetInputTensorNum(add) != kAddInputTensorNum) {
 return nullptr;
 }
+
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::MulAddFusion)) {
+return node;
+}
+
 CNodePtr mul = nullptr;
 size_t mul_index = 0;
 if (!GetMul(graph, add, &mul, &mul_index) || mul == nullptr || mul_index == 0) {
@@ -20,6 +20,7 @@
 #include "utils/utils.h"
 #include "backend/optimizer/common/helper.h"
 #include "base/core_ops.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
@@ -66,6 +67,11 @@ const AnfNodePtr ReshapeTransposeFusion::Process(const FuncGraphPtr &func_graph,
 if (!CheckShapeDimInfo(reshape_input0_shape) || !CheckShapeDimInfo(transpose_output0_shape)) {
 return nullptr;
 }
+
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::ReshapeTransposeFusion)) {
+return node;
+}
+
 auto prim = std::make_shared<Primitive>(kConfusionTransposeDOpName);
 std::vector<AnfNodePtr> inputs = {NewValueNode(prim), utils::cast<AnfNodePtr>((*equiv)[input_varptr_])};
 auto new_node = func_graph->NewCNode(inputs);
@@ -19,6 +19,7 @@
 #include "ir/primitive.h"
 #include "utils/utils.h"
 #include "backend/optimizer/common/helper.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
@@ -54,6 +55,11 @@ const AnfNodePtr SoftmaxGradExtFusion::Process(const FuncGraphPtr &graph, const
 MS_EXCEPTION_IF_NULL(graph);
 MS_EXCEPTION_IF_NULL(equiv);
 MS_EXCEPTION_IF_NULL(node);
+
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::SoftmaxGradExtFusion)) {
+return node;
+}
+
 auto input0 = GetAnfNodeByVar(equiv, input0_);
 auto input1 = GetAnfNodeByVar(equiv, input1_);
 auto input2 = GetAnfNodeByVar(equiv, input2_);
@@ -26,6 +26,8 @@
 #include "backend/optimizer/common/helper.h"
 #include "runtime/device/kernel_info.h"
 #include "utils/trace_base.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
 namespace {
@@ -97,6 +99,11 @@ const BaseRef SquareSumFusion::DefinePattern() const {
 const AnfNodePtr SquareSumFusion::Process(const FuncGraphPtr &graph, const AnfNodePtr &node, const EquivPtr &) const {
 MS_EXCEPTION_IF_NULL(graph);
 MS_EXCEPTION_IF_NULL(node);
+
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::SquareSumFusion)) {
+return node;
+}
+
 CNodePtr sum = nullptr;
 AnfNodePtr square_anf = nullptr;
 CNodePtr square = nullptr;
@@ -20,6 +20,7 @@
 #include "utils/utils.h"
 #include "backend/optimizer/common/helper.h"
 #include "base/core_ops.h"
+#include "runtime/device/ascend/lic_manager.h"

 namespace mindspore {
 namespace opt {
@@ -48,6 +49,11 @@ const AnfNodePtr TransposeReshapeFusion::Process(const FuncGraphPtr &func_graph,
 const EquivPtr &equiv) const {
 MS_EXCEPTION_IF_NULL(func_graph);
 MS_EXCEPTION_IF_NULL(equiv);
+
+if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::TransposeReshapeFusion)) {
+return node;
+}
+
 auto reshape_cnode = CheckAnfNodeIfCNodeAndInputSize(node, kBackendReshapeInputTensorNum);
 MS_EXCEPTION_IF_NULL(reshape_cnode);
 auto transpose_cnode = CheckAnfNodeIfCNodeAndInputSize(reshape_cnode->input(1), kBackendReshapeInputTensorNum);
@@ -51,14 +51,15 @@ set_target_properties(mindspore_shared_lib PROPERTIES OUTPUT_NAME mindspore)

 if(CMAKE_SYSTEM_NAME MATCHES "Darwin")
 target_link_libraries(mindspore_shared_lib PRIVATE ${PYTHON_LIBRARIES} ${SECUREC_LIBRARY}
--Wl,-force_load mindspore -Wl,-noall_load proto_input mindspore_gvar mindspore::protobuf)
+-Wl,-force_load mindspore -Wl,-noall_load mindspore_core proto_input mindspore_gvar mindspore::protobuf)
 else()
 if(ENABLE_D OR ENABLE_GPU)
 target_link_libraries(mindspore_shared_lib PRIVATE -Wl,--as-needed ${PYTHON_LIBRARIES} ${SECUREC_LIBRARY}
--Wl,--whole-archive mindspore -Wl,--no-whole-archive proto_input mindspore_gvar mindspore::protobuf)
+-Wl,--whole-archive mindspore -Wl,--no-whole-archive mindspore_core proto_input mindspore_gvar
+mindspore::protobuf)
 else()
 target_link_libraries(mindspore_shared_lib PRIVATE ${PYTHON_LIBRARIES} ${SECUREC_LIBRARY}
-mindspore proto_input mindspore_gvar mindspore::protobuf)
+mindspore mindspore_core proto_input mindspore_gvar mindspore::protobuf)
 endif()
 endif()

@@ -174,7 +174,7 @@ void DumpJsonParser::CopyMSCfgJsonToDir(uint32_t rank_id) {
 auto context = MsContext::GetInstance();
 MS_EXCEPTION_IF_NULL(context);
 ms_info["device_target"] = context->get_param<std::string>(MS_CTX_DEVICE_TARGET);
-ms_info["ms_version"] = "1.2.0";
+ms_info["ms_version"] = "1.3.0";
 const std::string file_path = realpath.value();
 ChangeFileMode(file_path, S_IWUSR);
 std::ofstream json_create(file_path);
@@ -113,7 +113,7 @@ void Debugger::Init(const uint32_t device_id, const std::string device_target) {
 device_id_ = device_id;
 MS_LOG(INFO) << "Debugger got device_target: " << device_target;
 device_target_ = device_target;
-version_ = "1.2.0";
+version_ = "1.3.0";
 }

 bool IsTypeDebuggerSupported(TypeId type) {
@@ -48,7 +48,7 @@ DbgServices::~DbgServices() {

 std::string DbgServices::GetVersion() {
 MS_LOG(INFO) << "get version is called";
-return "1.2.0";
+return "1.3.0";
 }

 int32_t DbgServices::Initialize(std::string net_name, std::string dump_folder_path, bool is_sync_mode) {
@@ -0,0 +1,127 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "runtime/device/ascend/lic_manager.h"
+#include <regex>
+#include "utils/ms_context.h"
+#include "runtime/dev.h"
+
+namespace gelc {
+uint32_t GetOptInfo(uint32_t, const std::string &, std::map<std::string, std::string> &); // NOLINT
+} // namespace gelc
+
+namespace mindspore {
+namespace {
+constexpr auto kFeKey = "opt_module.fe";
+constexpr auto kOpTuneKey = "opt_module.op_tune";
+constexpr auto kAllOpen = "ALL";
+
+static const std::map<std::string, OptPassEnum> kPassCodeMap = {
+{std::to_string(3), OptPassEnum::MatmulBiasaddFusion},
+{std::to_string(8), OptPassEnum::DereluFusion},
+{std::to_string(9), OptPassEnum::TransposeReshapeFusion},
+{std::to_string(10), OptPassEnum::MomentumLossscaleFusion},
+{std::to_string(12), OptPassEnum::FusedBatchNormFusion},
+{std::to_string(15), OptPassEnum::BnupdateEltwiseEltwiseFusionPass},
+{std::to_string(16), OptPassEnum::BnupdateEltwiseFusionPass},
+{std::to_string(17), OptPassEnum::Conv2DBackpropEltwiseFusionPass},
+{std::to_string(18), OptPassEnum::ConvBnReduceFusionPass},
+{std::to_string(26), OptPassEnum::ReshapeTransposeFusion},
+{std::to_string(27), OptPassEnum::SquareSumFusion},
+{std::to_string(30), OptPassEnum::MatmulEltwiseFusionPass},
+{std::to_string(33), OptPassEnum::BatchMatmulFusedMulAddFusionPass},
+{std::to_string(34), OptPassEnum::EltwiseFusionPass},
+{std::to_string(36), OptPassEnum::MultiOutputFusionPass},
+{std::to_string(37), OptPassEnum::MulAddFusion},
+{std::to_string(38), OptPassEnum::SoftmaxGradExtFusion},
+{std::to_string(39), OptPassEnum::ClipByNormNoDivSquareSumFusion},
+};
+
+inline std::vector<std::string> SplitStrByRegex(const std::string &str, const std::string &regex) {
+std::regex split(regex);
+return std::vector<std::string>(std::sregex_token_iterator(str.begin(), str.end(), split, -1),
+std::sregex_token_iterator());
+}
+
+static std::string GetSocVersion() {
+constexpr int kSocVersionLen = 50;
+char soc_version[kSocVersionLen] = {0};
+auto ret = rtGetSocVersion(soc_version, kSocVersionLen);
+if (ret != RT_ERROR_NONE) {
+MS_LOG(WARNING) << "rtGetSocVersion failed, ret = " << ret;
+return "Ascend910";
+}
+
+return soc_version;
+}
+} // namespace
+
+LicManager &LicManager::GetInstance() {
+static LicManager lic_manager{};
+return lic_manager;
+}
+
+bool LicManager::GetPassSwitch(OptPassEnum pass) {
+if (!init_flag) {
+ParseSwitch();
+}
+auto iter = pass_switch_.find(pass);
+if (iter == pass_switch_.end()) {
+return true;
+}
+
+return iter->second;
+}
+
+void LicManager::ParseSwitch() {
+std::map<std::string, std::string> opt_info_map;
+auto ret = gelc::GetOptInfo(0, GetSocVersion(), opt_info_map);
+if (ret != 0) {
+MS_LOG(WARNING) << "GetOptInfo failed.";
+init_flag = true;
+return;
+}
+
+auto iter = opt_info_map.find(kFeKey);
+if (iter != opt_info_map.end()) {
+ParseFeSwitch(iter->second);
+}
+
+init_flag = true;
+}
+
+void LicManager::ParseFeSwitch(const std::string &options_str) {
+// invalid options, do nothing.
+if (options_str.empty()) {
+return;
+}
+// "All" in options means all open, do nothing.
+if (options_str.find(kAllOpen) != std::string::npos) {
+return;
+}
+
+for (auto iter = kPassCodeMap.begin(); iter != kPassCodeMap.end(); ++iter) {
+auto pass = iter->second;
+pass_switch_.emplace(pass, false);
+}
+auto fe_pass = SplitStrByRegex(options_str, ":");
+for (auto &pass_code : fe_pass) {
+auto iter = kPassCodeMap.find(pass_code);
+if (iter != kPassCodeMap.end()) {
+pass_switch_[iter->second] = true;
+}
+}
+}
+} // namespace mindspore
@@ -0,0 +1,59 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MINDSPORE_CCSRC_RUNTIME_DEVICE_ASCEND_PASS_SWITCH_MANAGER_H_
+#define MINDSPORE_CCSRC_RUNTIME_DEVICE_ASCEND_PASS_SWITCH_MANAGER_H_
+
+#include <memory>
+#include <string>
+#include <map>
+#include <vector>
+
+namespace mindspore {
+enum class OptPassEnum {
+MatmulBiasaddFusion,
+MulAddFusion,
+ReshapeTransposeFusion,
+SoftmaxGradExtFusion,
+SquareSumFusion,
+TransposeReshapeFusion,
+ClipByNormNoDivSquareSumFusion,
+MomentumLossscaleFusion,
+DereluFusion,
+FusedBatchNormFusion,
+MatmulEltwiseFusionPass,
+BatchMatmulFusedMulAddFusionPass,
+EltwiseFusionPass,
+MultiOutputFusionPass,
+BnupdateEltwiseEltwiseFusionPass,
+BnupdateEltwiseFusionPass,
+Conv2DBackpropEltwiseFusionPass,
+ConvBnReduceFusionPass,
+};
+
+class LicManager {
+public:
+static LicManager &GetInstance();
+bool GetPassSwitch(OptPassEnum pass);
+
+private:
+void ParseSwitch();
+void ParseFeSwitch(const std::string &options_str);
+
+bool init_flag = false;
+std::map<OptPassEnum, bool> pass_switch_ = {};
+};
+} // namespace mindspore
+#endif // MINDSPORE_CCSRC_RUNTIME_DEVICE_ASCEND_PASS_SWITCH_MANAGER_H_
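For orientation, the pass-switch mechanism added by the two new files above works like this: ParseFeSwitch defaults every pass listed in kPassCodeMap to "off", then turns back on only the pass codes named in the colon-separated "opt_module.fe" option string, while GetPassSwitch treats any pass missing from the map as enabled. The snippet below is a minimal, self-contained sketch of that parsing logic written for this review; the Pass enum, the code values, and the ParseFeSwitch free function are illustrative stand-ins, not the MindSpore classes themselves.

// Standalone sketch of the fe-switch parsing idea (simplified; not the MindSpore code).
#include <iostream>
#include <map>
#include <regex>
#include <string>
#include <vector>

enum class Pass { MatmulBiasaddFusion, DereluFusion, SquareSumFusion };

std::map<Pass, bool> ParseFeSwitch(const std::string &options) {
  // Hypothetical code-to-pass table; the real one maps 18 numeric codes.
  static const std::map<std::string, Pass> kCodeMap = {
      {"3", Pass::MatmulBiasaddFusion}, {"8", Pass::DereluFusion}, {"27", Pass::SquareSumFusion}};
  std::map<Pass, bool> switches;
  if (options.empty() || options.find("ALL") != std::string::npos) {
    return switches;  // empty map: a GetPassSwitch-style lookup then falls back to "enabled"
  }
  for (const auto &kv : kCodeMap) {
    switches.emplace(kv.second, false);  // default every known pass to off
  }
  std::regex sep(":");
  std::vector<std::string> codes(std::sregex_token_iterator(options.begin(), options.end(), sep, -1),
                                 std::sregex_token_iterator());
  for (const auto &code : codes) {
    auto it = kCodeMap.find(code);
    if (it != kCodeMap.end()) {
      switches[it->second] = true;  // only explicitly listed passes stay on
    }
  }
  return switches;
}

int main() {
  auto sw = ParseFeSwitch("3:27");
  std::cout << std::boolalpha << sw[Pass::MatmulBiasaddFusion] << " "  // true
            << sw[Pass::DereluFusion] << " "                           // false
            << sw[Pass::SquareSumFusion] << "\n";                      // true
}

This matches how the fusion-pass hunks earlier in the commit consume the switch: each MatchSingleFusionPattern or Process method returns early when LicManager::GetInstance().GetPassSwitch(OptPassEnum::<Pass>) reports the pass as disabled.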
@@ -207,7 +207,7 @@ class AscendEnvChecker(EnvChecker):
 """ascend environment check"""

 def __init__(self):
-self.version = ["1.78.23.0.230"]
+self.version = ["1.78.23.1.230"]
 atlas_nnae_version = "/usr/local/Ascend/nnae/latest/fwkacllib/version.info"
 atlas_toolkit_version = "/usr/local/Ascend/ascend-toolkit/latest/fwkacllib/version.info"
 hisi_fwk_version = "/usr/local/Ascend/fwkacllib/version.info"
@@ -1,5 +1,5 @@
 numpy >= 1.17.0
-protobuf >= 3.8.0
+protobuf >= 3.13.0
 asttokens >= 1.1.13
 pillow >= 6.2.0
 scipy >= 1.5.2
@@ -17,4 +17,4 @@ packaging >= 20.0
 pycocotools >= 2.0.2 # for st test
 tables >= 3.6.1 # for st test
 easydict >= 1.9 # for st test
-psutil >= 5.7.0
+psutil >= 5.7.0

setup.py
@@ -108,7 +108,7 @@ build_dependencies()

 required_package = [
 'numpy >= 1.17.0',
-'protobuf >= 3.8.0',
+'protobuf >= 3.13.0',
 'asttokens >= 1.1.13',
 'pillow >= 6.2.0',
 'scipy >= 1.5.2',
@@ -126,7 +126,7 @@ def op_network_with_step_num(dataset, step_num):
 return op_network_with_epoch(net_with_dataset, step_num)


-@pytest.mark.level0
+@pytest.mark.level1
 @pytest.mark.platform_arm_ascend_training
 @pytest.mark.platform_x86_ascend_training
 @pytest.mark.env_onecard
@@ -51,7 +51,7 @@ class TestModifiedReLU:
 self.abs_ = P.Abs()
 self.reshape = P.Reshape()

-@pytest.mark.level0
+@pytest.mark.level1
 @pytest.mark.platform_arm_ascend_training
 @pytest.mark.platform_x86_ascend_training
 @pytest.mark.env_onecard
@@ -124,6 +124,7 @@ file(GLOB_RECURSE MINDSPORE_SRC_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}
 "../../../mindspore/ccsrc/runtime/device/ascend/ascend_memory_manager.cc"
 "../../../mindspore/ccsrc/runtime/device/ascend/ascend_device_address.cc"
 "../../../mindspore/ccsrc/runtime/device/ascend/ascend_memory_pool.cc"
+"../../../mindspore/ccsrc/runtime/device/ascend/lic_manager.cc"
 "../../../mindspore/ccsrc/backend/kernel_compiler/cpu/cpu_kernel.cc"
 "../../../mindspore/ccsrc/backend/kernel_compiler/cpu/cpu_kernel_factory.cc"
 "../../../mindspore/ccsrc/backend/kernel_compiler/cpu/sparse_apply_adam_cpu_kernel.cc"
@@ -51,4 +51,10 @@ Status GEFinalize() { return ge::GRAPH_SUCCESS; }
 Status Graph::SaveToFile(const string& file_name) const { return ge::GRAPH_SUCCESS; }

 } // namespace ge
+
+namespace gelc {
+uint32_t GetOptInfo(uint32_t mode, const std::string &soc_ver, std::map<std::string, std::string> &opt_info_map) {
+return 0;
+}
+} // namespace gelc
 #endif