diff --git a/cmake/external_libs/protobuf.cmake b/cmake/external_libs/protobuf.cmake
index 1d280d6d0c0..7067a1ed18a 100644
--- a/cmake/external_libs/protobuf.cmake
+++ b/cmake/external_libs/protobuf.cmake
@@ -25,15 +25,15 @@ string(REPLACE " -Wall" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
 string(REPLACE " -Werror" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
 
 if(ENABLE_GITEE)
-    set(REQ_URL "https://gitee.com/mirrors/protobuf_source/repository/archive/v3.8.0.tar.gz")
-    set(MD5 "eba86ae9f07ba5cfbaf8af3bc4e84236")
+    set(REQ_URL "https://gitee.com/mirrors/protobuf_source/repository/archive/v3.13.0.tar.gz")
+    set(MD5 "f4489cb88922ad9c58cb3308d59cee5") # NOTE(review): only 31 hex chars — a valid MD5 digest is 32; re-check the gitee v3.13.0 archive checksum (the github one below is 32)
 else()
-    set(REQ_URL "https://github.com/protocolbuffers/protobuf/archive/v3.8.0.tar.gz")
-    set(MD5 "3d9e32700639618a4d2d342c99d4507a")
+    set(REQ_URL "https://github.com/protocolbuffers/protobuf/archive/v3.13.0.tar.gz")
+    set(MD5 "1a6274bc4a65b55a6fa70e264d796490")
 endif()
 
 mindspore_add_pkg(protobuf
-        VER 3.8.0
+        VER 3.13.0
         LIBS protobuf
         EXE protoc
         URL ${REQ_URL}
diff --git a/graphengine b/graphengine
index 2d5ad3a72e5..b4b7c454e60 160000
--- a/graphengine
+++ b/graphengine
@@ -1 +1 @@
-Subproject commit 2d5ad3a72e5fefa3857eb9443083cd1117fd49a7
+Subproject commit b4b7c454e6041e864cdc2cc0fb5e61d3b5223c30
diff --git a/mindspore/ccsrc/CMakeLists.txt b/mindspore/ccsrc/CMakeLists.txt
index 947ec6e49ca..ebd90cc2b43 100644
--- a/mindspore/ccsrc/CMakeLists.txt
+++ b/mindspore/ccsrc/CMakeLists.txt
@@ -293,6 +293,7 @@ if(MODE_ASCEND_ALL)
     find_library(ACL ascendcl ${ASCEND_RUNTIME_PATH} ${ASCEND_TOOLKIT_RUNTIME_PATH})
     find_library(PLATFORM platform ${ASCEND_RUNTIME_PATH} ${ASCEND_TOOLKIT_RUNTIME_PATH})
     find_library(OPTILING optiling ${ASCEND_OPP_PATH} ${ASCEND_TOOLKIT_OPP_PATH})
+    find_library(OPT_FEATURE opt_feature ${ASCEND_RUNTIME_PATH} ${ASCEND_TOOLKIT_RUNTIME_PATH})
 
     add_library(ms_profile SHARED
                 ${CMAKE_CURRENT_SOURCE_DIR}/runtime/device/ascend/profiling/profiling_callback_register.cc)
@@ -301,7 +302,7 @@ if(MODE_ASCEND_ALL)
     target_link_libraries(ms_profile -Wl,--start-group -Wl,--whole-archive ${PROFILING} -Wl,--no-whole-archive
                           mindspore::protobuf -Wl,--end-group)
     target_link_libraries(mindspore ${RUNTIME_LIB} ${TSDCLIENT} ${DATATRANSFER} -Wl,--no-as-needed ${OPTILING}
-      ${PLATFORM} ${ACL})
+      ${PLATFORM} ${ACL} ${OPT_FEATURE})
     target_link_libraries(mindspore -Wl,--start-group proto_input mindspore::protobuf -Wl,--end-group)
 elseif(CMAKE_SYSTEM_NAME MATCHES "Windows")
     target_link_libraries(mindspore -Wl,--start-group proto_input mindspore::protobuf mindspore::sentencepiece
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/batchmatmul_fusedmuladd_fusion_pass.cc b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/batchmatmul_fusedmuladd_fusion_pass.cc
index 16aeeaa6aec..3a789bfb95c 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/batchmatmul_fusedmuladd_fusion_pass.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/batchmatmul_fusedmuladd_fusion_pass.cc
@@ -46,6 +46,9 @@ void BatchMatmulFusedMulAddFusionPass::MatchBatchMatmulFusedMulAdd(const CNodePt
 void BatchMatmulFusedMulAddFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
                                                                 FusedNodeRecord *candidate_fusion) {
   MS_EXCEPTION_IF_NULL(candidate_fusion);
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::BatchMatmulFusedMulAddFusionPass)) {
+    return;
+  }
   std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
   for (auto &node : node_list) {
     if (!AnfAlgo::IsRealCNodeKernel(node) || fusion_id_allocator->HasFusionIdAttr(node) ||
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/bnupdate_eltwise_eltwise_fusion_pass.cc b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/bnupdate_eltwise_eltwise_fusion_pass.cc
index 39088cb376f..0752f90be0e 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/bnupdate_eltwise_eltwise_fusion_pass.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/bnupdate_eltwise_eltwise_fusion_pass.cc
@@ -69,6 +69,9 @@ void BnupdateEltwiseEltwiseFusionPass::MatchBnupdateAddRelu(const CNodePtr &cnod
 
 void BnupdateEltwiseEltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
                                                                 FusedNodeRecord *candidate_fusion) {
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::BnupdateEltwiseEltwiseFusionPass)) {
+    return;
+  }
   MS_EXCEPTION_IF_NULL(candidate_fusion);
   std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
   for (auto &node : node_list) {
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/bnupdate_eltwise_fusion_pass.cc b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/bnupdate_eltwise_fusion_pass.cc
index 10793ef4405..c3315857515 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/bnupdate_eltwise_fusion_pass.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/bnupdate_eltwise_fusion_pass.cc
@@ -48,6 +48,9 @@ void BnupdateEltwiseFusionPass::MatchBnupdateDoubleOutputEltwise(const CNodePtr
 
 void BnupdateEltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
                                                          FusedNodeRecord *candidate_fusion) {
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::BnupdateEltwiseFusionPass)) {
+    return;
+  }
   MS_EXCEPTION_IF_NULL(candidate_fusion);
   std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
   for (auto &node : node_list) {
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv2dbackprop_eltwise_eltwise_fusion_pass.cc b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv2dbackprop_eltwise_eltwise_fusion_pass.cc
index 4103416edc7..f3e4018f3d9 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv2dbackprop_eltwise_eltwise_fusion_pass.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv2dbackprop_eltwise_eltwise_fusion_pass.cc
@@ -69,6 +69,9 @@ void Conv2DBackpropEltwiseEltwiseFusionPass::MatchConv2DBackpropInputEltwiseEltw
 
 void Conv2DBackpropEltwiseEltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
                                                                       FusedNodeRecord *candidate_fusion) {
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::Conv2DBackpropEltwiseFusionPass)) {  // NOTE(review): EltwiseEltwise pass reuses the single-Eltwise enum (no EltwiseEltwise entry in OptPassEnum) — confirm one lic code intentionally covers both
+    return;
+  }
   MS_EXCEPTION_IF_NULL(candidate_fusion);
   std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
   for (auto &node : node_list) {
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv2dbackprop_eltwise_fusion_pass.cc b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv2dbackprop_eltwise_fusion_pass.cc
index 6960f41e93e..88f0bb8b5cc 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv2dbackprop_eltwise_fusion_pass.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv2dbackprop_eltwise_fusion_pass.cc
@@ -47,6 +47,9 @@ void Conv2DBackpropEltwiseFusionPass::MatchConv2DBackpropInputEltwise(const CNod
 
 void Conv2DBackpropEltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
                                                                FusedNodeRecord *candidate_fusion) {
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::Conv2DBackpropEltwiseFusionPass)) {
+    return;
+  }
   MS_EXCEPTION_IF_NULL(candidate_fusion);
   std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
   for (auto &node : node_list) {
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv_bnreduce_fusion_pass.cc b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv_bnreduce_fusion_pass.cc
index ff94f0c71ba..aacab919413 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv_bnreduce_fusion_pass.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/conv_bnreduce_fusion_pass.cc
@@ -45,6 +45,9 @@ void ConvBnReduceFusionPass::MatchConvBnreduce(const CNodePtr &cnode, const sess
 
 void ConvBnReduceFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
                                                       FusedNodeRecord *candidate_fusion) {
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::ConvBnReduceFusionPass)) {
+    return;
+  }
   MS_EXCEPTION_IF_NULL(candidate_fusion);
   std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
   for (auto &node : node_list) {
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/eltwise_fusion_pass.cc b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/eltwise_fusion_pass.cc
index 19c4d6ace53..b2edacd5469 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/eltwise_fusion_pass.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/eltwise_fusion_pass.cc
@@ -55,6 +55,9 @@ void EltwiseFusionPass::MatchEltwise(const CNodePtr &cnode, const session::Kerne
 void EltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
                                                  FusedNodeRecord *candidate_fusion) {
   MS_EXCEPTION_IF_NULL(candidate_fusion);
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::EltwiseFusionPass)) {
+    return;
+  }
   std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
   std::reverse(node_list.begin(), node_list.end());
   for (auto &node : node_list) {
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/fusion_base_pass.h b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/fusion_base_pass.h
index 76cd8c980bf..629cd99dd7a 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/fusion_base_pass.h
+++ b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/fusion_base_pass.h
@@ -26,6 +26,7 @@
 #include "runtime/device/kernel_info.h"
 #include "backend/kernel_compiler/kernel.h"
 #include "backend/session/kernel_graph.h"
+#include "runtime/device/ascend/lic_manager.h"
 
 namespace mindspore {
 namespace opt {
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/matmul_eltwise_fusion_pass.cc b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/matmul_eltwise_fusion_pass.cc
index e958fb0877d..ef4f7013b0f 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/matmul_eltwise_fusion_pass.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/matmul_eltwise_fusion_pass.cc
@@ -44,6 +44,11 @@ void MatmulEltwiseFusionPass::MatchMatmulEltwise(const CNodePtr &cnode, const An
 void MatmulEltwiseFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
                                                        FusedNodeRecord *candidate_fusion) {
   MS_EXCEPTION_IF_NULL(candidate_fusion);
+
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::MatmulEltwiseFusionPass)) {
+    return;
+  }
+
   std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
   for (auto &node : node_list) {
     if (!AnfAlgo::IsRealCNodeKernel(node) || fusion_id_allocator->HasFusionIdAttr(node) ||
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/multi_output_fusion_pass.cc b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/multi_output_fusion_pass.cc
index 8c7ccd40fe7..4142f297f25 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/multi_output_fusion_pass.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/buffer_fusion/multi_output_fusion_pass.cc
@@ -23,6 +23,7 @@
 #include "base/core_ops.h"
 #include "utils/ms_context.h"
 #include "backend/optimizer/common/fusion_id_allocator.h"
+#include "runtime/device/ascend/lic_manager.h"
 
 namespace mindspore {
 namespace opt {
@@ -62,6 +63,9 @@ void MultiOutputFusionPass::MatchMultiOutputEltwise(const CNodePtr &cnode, const
 void MultiOutputFusionPass::MatchSingleFusionPattern(const session::KernelGraph &kernel_graph,
                                                      FusedNodeRecord *candidate_fusion) {
   MS_EXCEPTION_IF_NULL(candidate_fusion);
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::MultiOutputFusionPass)) {
+    return;
+  }
   std::vector<AnfNodePtr> node_list = TopoSort(kernel_graph.get_return());
   std::reverse(node_list.begin(), node_list.end());
   for (auto &node : node_list) {
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/clip_by_norm_no_div_square_sum_fusion.cc b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/clip_by_norm_no_div_square_sum_fusion.cc
index 61726eeb997..25b5275b3aa 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/clip_by_norm_no_div_square_sum_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/clip_by_norm_no_div_square_sum_fusion.cc
@@ -21,6 +21,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "ir/primitive.h"
 #include "utils/utils.h"
+#include "runtime/device/ascend/lic_manager.h"
 
 namespace mindspore {
 namespace opt {
@@ -44,6 +45,11 @@ const AnfNodePtr ClipByNormNoDivSquareSumFusion::Process(const FuncGraphPtr &gra
   MS_EXCEPTION_IF_NULL(graph);
   MS_EXCEPTION_IF_NULL(node);
   MS_EXCEPTION_IF_NULL(equiv);
+
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::ClipByNormNoDivSquareSumFusion)) {
+    return node;
+  }
+
   BaseRef &input_gnode = (*equiv)[input_];
   BaseRef &constant_select_gnode = (*equiv)[constant_select_];
   BaseRef &constant_greater_gnode = (*equiv)[constant_greater_];
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/derelu_fusion.cc b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/derelu_fusion.cc
index 7699d01f6ae..0fef41ad70a 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/derelu_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/derelu_fusion.cc
@@ -22,6 +22,7 @@
 #include "utils/utils.h"
 #include "abstract/abstract_value.h"
 #include "backend/optimizer/common/helper.h"
+#include "runtime/device/ascend/lic_manager.h"
 
 namespace mindspore {
 namespace opt {
@@ -105,6 +106,11 @@ const BaseRef DereluFusion::DefinePattern() const {
 const AnfNodePtr DereluFusion::Process(const FuncGraphPtr &graph, const AnfNodePtr &node, const EquivPtr &) const {
   MS_EXCEPTION_IF_NULL(graph);
   MS_EXCEPTION_IF_NULL(node);
+
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::DereluFusion)) {
+    return node;
+  }
+
   auto relu_grad = node->cast<CNodePtr>();
   MS_EXCEPTION_IF_NULL(relu_grad);
   auto relu = GetRelu(relu_grad);
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/fused_batch_norm_fusion.cc b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/fused_batch_norm_fusion.cc
index 6bf7a8abb0f..e3f7311cf5e 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/fused_batch_norm_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/fused_batch_norm_fusion.cc
@@ -20,6 +20,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "utils/utils.h"
 #include "utils/trace_base.h"
+#include "runtime/device/ascend/lic_manager.h"
 
 namespace mindspore {
 namespace opt {
@@ -193,6 +194,11 @@ const AnfNodePtr FusedBatchNormFusion::Process(const FuncGraphPtr &func_graph, c
   MS_EXCEPTION_IF_NULL(func_graph);
   MS_EXCEPTION_IF_NULL(equiv);
   MS_EXCEPTION_IF_NULL(node);
+
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::FusedBatchNormFusion)) {
+    return node;
+  }
+
   AnfNodePtr bn_training_reduce = CreateBNTrainingReduce(func_graph, node, equiv);
   std::vector<AnfNodePtr> bn_training_reduce_outputs;
   CreateMultipleOutputsOfAnfNode(func_graph, bn_training_reduce, kBNTrainingReduceOutputNum,
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/matmul_biasadd_fusion.cc b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/matmul_biasadd_fusion.cc
index c36dde8cfab..310ed304403 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/matmul_biasadd_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/matmul_biasadd_fusion.cc
@@ -19,6 +19,8 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "utils/utils.h"
 #include "utils/trace_base.h"
+#include "runtime/device/ascend/lic_manager.h"
+
 namespace mindspore {
 namespace opt {
 const BaseRef MatmulBiasaddFusion::DefinePattern() const {
@@ -31,6 +33,9 @@ const AnfNodePtr MatmulBiasaddFusion::Process(const FuncGraphPtr &graph, const A
                                               const EquivPtr &equiv) const {
   MS_EXCEPTION_IF_NULL(node);
   MS_EXCEPTION_IF_NULL(graph);
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::MatmulBiasaddFusion)) {
+    return node;
+  }
 
   auto matmul = GetAnfNodeByVar(equiv, matmul_var_);
   if (matmul == nullptr || !matmul->isa<CNode>()) {
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/momentum_lossscale_fusion.cc b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/momentum_lossscale_fusion.cc
index af043b067b2..490167864d6 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/momentum_lossscale_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/momentum_lossscale_fusion.cc
@@ -19,6 +19,7 @@
 #include <string>
 #include "backend/optimizer/common/helper.h"
 #include "backend/session/anf_runtime_algorithm.h"
+#include "runtime/device/ascend/lic_manager.h"
 
 namespace mindspore {
 namespace opt {
@@ -52,6 +53,11 @@ const AnfNodePtr MomentumLossscaleFusion::Process(const FuncGraphPtr &func_graph
                                                   const EquivPtr &) const {
   MS_EXCEPTION_IF_NULL(func_graph);
   MS_EXCEPTION_IF_NULL(node);
+
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::MomentumLossscaleFusion)) {
+    return node;
+  }
+
   auto cnode = node->cast<CNodePtr>();
   MS_EXCEPTION_IF_NULL(cnode);
   CheckCNodeInputSize(cnode, kApplyMomentumInputTensorNum);
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/mul_add_fusion.cc b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/mul_add_fusion.cc
index 85599b69756..e5c91e3aac9 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/mul_add_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/mul_add_fusion.cc
@@ -21,6 +21,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "frontend/optimizer/opt.h"
 #include "backend/optimizer/common/helper.h"
+#include "runtime/device/ascend/lic_manager.h"
 
 namespace mindspore {
 namespace opt {
@@ -70,6 +71,11 @@ const AnfNodePtr MulAddFusion::Process(const FuncGraphPtr &graph, const AnfNodeP
   if (add == nullptr || AnfAlgo::GetInputTensorNum(add) != kAddInputTensorNum) {
     return nullptr;
   }
+
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::MulAddFusion)) {
+    return node;
+  }
+
   CNodePtr mul = nullptr;
   size_t mul_index = 0;
   if (!GetMul(graph, add, &mul, &mul_index) || mul == nullptr || mul_index == 0) {
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/reshape_transpose_fusion.cc b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/reshape_transpose_fusion.cc
index e846ab4e65a..e93f45106fd 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/reshape_transpose_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/reshape_transpose_fusion.cc
@@ -20,6 +20,7 @@
 #include "utils/utils.h"
 #include "backend/optimizer/common/helper.h"
 #include "base/core_ops.h"
+#include "runtime/device/ascend/lic_manager.h"
 
 namespace mindspore {
 namespace opt {
@@ -66,6 +67,11 @@ const AnfNodePtr ReshapeTransposeFusion::Process(const FuncGraphPtr &func_graph,
   if (!CheckShapeDimInfo(reshape_input0_shape) || !CheckShapeDimInfo(transpose_output0_shape)) {
     return nullptr;
   }
+
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::ReshapeTransposeFusion)) {
+    return node;
+  }
+
   auto prim = std::make_shared<Primitive>(kConfusionTransposeDOpName);
   std::vector<AnfNodePtr> inputs = {NewValueNode(prim), utils::cast<AnfNodePtr>((*equiv)[input_varptr_])};
   auto new_node = func_graph->NewCNode(inputs);
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/softmax_grad_ext_fusion.cc b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/softmax_grad_ext_fusion.cc
index a3706bfb68d..87dbb99446f 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/softmax_grad_ext_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/softmax_grad_ext_fusion.cc
@@ -19,6 +19,7 @@
 #include "ir/primitive.h"
 #include "utils/utils.h"
 #include "backend/optimizer/common/helper.h"
+#include "runtime/device/ascend/lic_manager.h"
 
 namespace mindspore {
 namespace opt {
@@ -54,6 +55,11 @@ const AnfNodePtr SoftmaxGradExtFusion::Process(const FuncGraphPtr &graph, const
   MS_EXCEPTION_IF_NULL(graph);
   MS_EXCEPTION_IF_NULL(equiv);
   MS_EXCEPTION_IF_NULL(node);
+
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::SoftmaxGradExtFusion)) {
+    return node;
+  }
+
   auto input0 = GetAnfNodeByVar(equiv, input0_);
   auto input1 = GetAnfNodeByVar(equiv, input1_);
   auto input2 = GetAnfNodeByVar(equiv, input2_);
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/square_sum_fusion.cc b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/square_sum_fusion.cc
index c660ebfc3be..24d11291da6 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/square_sum_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/square_sum_fusion.cc
@@ -26,6 +26,8 @@
 #include "backend/optimizer/common/helper.h"
 #include "runtime/device/kernel_info.h"
 #include "utils/trace_base.h"
+#include "runtime/device/ascend/lic_manager.h"
+
 namespace mindspore {
 namespace opt {
 namespace {
@@ -97,6 +99,11 @@ const BaseRef SquareSumFusion::DefinePattern() const {
 const AnfNodePtr SquareSumFusion::Process(const FuncGraphPtr &graph, const AnfNodePtr &node, const EquivPtr &) const {
   MS_EXCEPTION_IF_NULL(graph);
   MS_EXCEPTION_IF_NULL(node);
+
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::SquareSumFusion)) {
+    return node;
+  }
+
   CNodePtr sum = nullptr;
   AnfNodePtr square_anf = nullptr;
   CNodePtr square = nullptr;
diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/transpose_reshape_fusion.cc b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/transpose_reshape_fusion.cc
index 94af9176049..213be3e3579 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/transpose_reshape_fusion.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ir_fusion/transpose_reshape_fusion.cc
@@ -20,6 +20,7 @@
 #include "utils/utils.h"
 #include "backend/optimizer/common/helper.h"
 #include "base/core_ops.h"
+#include "runtime/device/ascend/lic_manager.h"
 
 namespace mindspore {
 namespace opt {
@@ -48,6 +49,11 @@ const AnfNodePtr TransposeReshapeFusion::Process(const FuncGraphPtr &func_graph,
                                                  const EquivPtr &equiv) const {
   MS_EXCEPTION_IF_NULL(func_graph);
   MS_EXCEPTION_IF_NULL(equiv);
+
+  if (!LicManager::GetInstance().GetPassSwitch(OptPassEnum::TransposeReshapeFusion)) {
+    return node;
+  }
+
   auto reshape_cnode = CheckAnfNodeIfCNodeAndInputSize(node, kBackendReshapeInputTensorNum);
   MS_EXCEPTION_IF_NULL(reshape_cnode);
   auto transpose_cnode = CheckAnfNodeIfCNodeAndInputSize(reshape_cnode->input(1), kBackendReshapeInputTensorNum);
diff --git a/mindspore/ccsrc/cxx_api/CMakeLists.txt b/mindspore/ccsrc/cxx_api/CMakeLists.txt
index 76ceab48899..7ec2a5f28ec 100644
--- a/mindspore/ccsrc/cxx_api/CMakeLists.txt
+++ b/mindspore/ccsrc/cxx_api/CMakeLists.txt
@@ -51,14 +51,15 @@ set_target_properties(mindspore_shared_lib PROPERTIES OUTPUT_NAME mindspore)
 
 if(CMAKE_SYSTEM_NAME MATCHES "Darwin")
     target_link_libraries(mindspore_shared_lib PRIVATE ${PYTHON_LIBRARIES} ${SECUREC_LIBRARY}
-            -Wl,-force_load mindspore -Wl,-noall_load proto_input mindspore_gvar mindspore::protobuf)
+            -Wl,-force_load mindspore -Wl,-noall_load mindspore_core proto_input mindspore_gvar mindspore::protobuf)
 else()
     if(ENABLE_D OR ENABLE_GPU)
         target_link_libraries(mindspore_shared_lib PRIVATE -Wl,--as-needed ${PYTHON_LIBRARIES} ${SECUREC_LIBRARY}
-            -Wl,--whole-archive mindspore -Wl,--no-whole-archive proto_input mindspore_gvar mindspore::protobuf)
+            -Wl,--whole-archive mindspore -Wl,--no-whole-archive mindspore_core proto_input mindspore_gvar
+            mindspore::protobuf)
     else()
         target_link_libraries(mindspore_shared_lib PRIVATE ${PYTHON_LIBRARIES} ${SECUREC_LIBRARY}
-          mindspore proto_input mindspore_gvar mindspore::protobuf)
+          mindspore mindspore_core proto_input mindspore_gvar mindspore::protobuf)
     endif()
 endif()
 
diff --git a/mindspore/ccsrc/debug/data_dump/dump_json_parser.cc b/mindspore/ccsrc/debug/data_dump/dump_json_parser.cc
index a69a1a0ed2c..5278943be76 100644
--- a/mindspore/ccsrc/debug/data_dump/dump_json_parser.cc
+++ b/mindspore/ccsrc/debug/data_dump/dump_json_parser.cc
@@ -174,7 +174,7 @@ void DumpJsonParser::CopyMSCfgJsonToDir(uint32_t rank_id) {
     auto context = MsContext::GetInstance();
     MS_EXCEPTION_IF_NULL(context);
     ms_info["device_target"] = context->get_param<std::string>(MS_CTX_DEVICE_TARGET);
-    ms_info["ms_version"] = "1.2.0";
+    ms_info["ms_version"] = "1.3.0";
     const std::string file_path = realpath.value();
     ChangeFileMode(file_path, S_IWUSR);
     std::ofstream json_create(file_path);
diff --git a/mindspore/ccsrc/debug/debugger/debugger.cc b/mindspore/ccsrc/debug/debugger/debugger.cc
index 05ab1ac963b..8b45ca0065c 100644
--- a/mindspore/ccsrc/debug/debugger/debugger.cc
+++ b/mindspore/ccsrc/debug/debugger/debugger.cc
@@ -113,7 +113,7 @@ void Debugger::Init(const uint32_t device_id, const std::string device_target) {
   device_id_ = device_id;
   MS_LOG(INFO) << "Debugger got device_target: " << device_target;
   device_target_ = device_target;
-  version_ = "1.2.0";
+  version_ = "1.3.0";
 }
 
 bool IsTypeDebuggerSupported(TypeId type) {
diff --git a/mindspore/ccsrc/debug/debugger/offline_debug/dbg_services.cc b/mindspore/ccsrc/debug/debugger/offline_debug/dbg_services.cc
index 8cc08fb8e25..3d062008e6f 100644
--- a/mindspore/ccsrc/debug/debugger/offline_debug/dbg_services.cc
+++ b/mindspore/ccsrc/debug/debugger/offline_debug/dbg_services.cc
@@ -48,7 +48,7 @@ DbgServices::~DbgServices() {
 
 std::string DbgServices::GetVersion() {
   MS_LOG(INFO) << "get version is called";
-  return "1.2.0";
+  return "1.3.0";
 }
 
 int32_t DbgServices::Initialize(std::string net_name, std::string dump_folder_path, bool is_sync_mode) {
diff --git a/mindspore/ccsrc/runtime/device/ascend/lic_manager.cc b/mindspore/ccsrc/runtime/device/ascend/lic_manager.cc
new file mode 100644
index 00000000000..e5d79ed9838
--- /dev/null
+++ b/mindspore/ccsrc/runtime/device/ascend/lic_manager.cc
@@ -0,0 +1,127 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "runtime/device/ascend/lic_manager.h"
+#include <regex>
+#include "utils/ms_context.h"
+#include "runtime/dev.h"
+
+namespace gelc {
+uint32_t GetOptInfo(uint32_t, const std::string &, std::map<std::string, std::string> &);  // NOLINT
+}  // namespace gelc
+
+namespace mindspore {
+namespace {
+constexpr auto kFeKey = "opt_module.fe";
+constexpr auto kOpTuneKey = "opt_module.op_tune";
+constexpr auto kAllOpen = "ALL";
+
+static const std::map<std::string, OptPassEnum> kPassCodeMap = {
+  {std::to_string(3), OptPassEnum::MatmulBiasaddFusion},
+  {std::to_string(8), OptPassEnum::DereluFusion},
+  {std::to_string(9), OptPassEnum::TransposeReshapeFusion},
+  {std::to_string(10), OptPassEnum::MomentumLossscaleFusion},
+  {std::to_string(12), OptPassEnum::FusedBatchNormFusion},
+  {std::to_string(15), OptPassEnum::BnupdateEltwiseEltwiseFusionPass},
+  {std::to_string(16), OptPassEnum::BnupdateEltwiseFusionPass},
+  {std::to_string(17), OptPassEnum::Conv2DBackpropEltwiseFusionPass},
+  {std::to_string(18), OptPassEnum::ConvBnReduceFusionPass},
+  {std::to_string(26), OptPassEnum::ReshapeTransposeFusion},
+  {std::to_string(27), OptPassEnum::SquareSumFusion},
+  {std::to_string(30), OptPassEnum::MatmulEltwiseFusionPass},
+  {std::to_string(33), OptPassEnum::BatchMatmulFusedMulAddFusionPass},
+  {std::to_string(34), OptPassEnum::EltwiseFusionPass},
+  {std::to_string(36), OptPassEnum::MultiOutputFusionPass},
+  {std::to_string(37), OptPassEnum::MulAddFusion},
+  {std::to_string(38), OptPassEnum::SoftmaxGradExtFusion},
+  {std::to_string(39), OptPassEnum::ClipByNormNoDivSquareSumFusion},
+};
+
+inline std::vector<std::string> SplitStrByRegex(const std::string &str, const std::string &regex) {
+  std::regex split(regex);
+  return std::vector<std::string>(std::sregex_token_iterator(str.begin(), str.end(), split, -1),
+                                  std::sregex_token_iterator());
+}
+
+static std::string GetSocVersion() {
+  constexpr int kSocVersionLen = 50;
+  char soc_version[kSocVersionLen] = {0};
+  auto ret = rtGetSocVersion(soc_version, kSocVersionLen);
+  if (ret != RT_ERROR_NONE) {
+    MS_LOG(WARNING) << "rtGetSocVersion failed, ret = " << ret;
+    return "Ascend910";
+  }
+
+  return soc_version;
+}
+}  // namespace
+
+LicManager &LicManager::GetInstance() {
+  static LicManager lic_manager{};
+  return lic_manager;
+}
+
+bool LicManager::GetPassSwitch(OptPassEnum pass) {
+  if (!init_flag) {  // lazy one-shot init; NOTE(review): unsynchronized — confirm first call is single-threaded, else guard with std::call_once
+    ParseSwitch();
+  }
+  auto iter = pass_switch_.find(pass);
+  if (iter == pass_switch_.end()) {
+    return true;
+  }
+
+  return iter->second;
+}
+
+void LicManager::ParseSwitch() {
+  std::map<std::string, std::string> opt_info_map;
+  auto ret = gelc::GetOptInfo(0, GetSocVersion(), opt_info_map);
+  if (ret != 0) {
+    MS_LOG(WARNING) << "GetOptInfo failed.";
+    init_flag = true;
+    return;
+  }
+
+  auto iter = opt_info_map.find(kFeKey);
+  if (iter != opt_info_map.end()) {
+    ParseFeSwitch(iter->second);
+  }
+
+  init_flag = true;
+}
+
+void LicManager::ParseFeSwitch(const std::string &options_str) {
+  // invalid options, do nothing.
+  if (options_str.empty()) {
+    return;
+  }
+  // "All" in options means all open, do nothing.
+  if (options_str.find(kAllOpen) != std::string::npos) {
+    return;
+  }
+
+  for (auto iter = kPassCodeMap.begin(); iter != kPassCodeMap.end(); ++iter) {
+    auto pass = iter->second;
+    pass_switch_.emplace(pass, false);
+  }
+  auto fe_pass = SplitStrByRegex(options_str, ":");
+  for (auto &pass_code : fe_pass) {
+    auto iter = kPassCodeMap.find(pass_code);
+    if (iter != kPassCodeMap.end()) {
+      pass_switch_[iter->second] = true;
+    }
+  }
+}
+}  // namespace mindspore
diff --git a/mindspore/ccsrc/runtime/device/ascend/lic_manager.h b/mindspore/ccsrc/runtime/device/ascend/lic_manager.h
new file mode 100644
index 00000000000..386087aabd1
--- /dev/null
+++ b/mindspore/ccsrc/runtime/device/ascend/lic_manager.h
@@ -0,0 +1,59 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MINDSPORE_CCSRC_RUNTIME_DEVICE_ASCEND_LIC_MANAGER_H_
+#define MINDSPORE_CCSRC_RUNTIME_DEVICE_ASCEND_LIC_MANAGER_H_
+
+#include <string>
+#include <map>
+
+namespace mindspore {
+// Optimization passes whose on/off state is controlled by license options.
+enum class OptPassEnum {
+  MatmulBiasaddFusion,
+  MulAddFusion,
+  ReshapeTransposeFusion,
+  SoftmaxGradExtFusion,
+  SquareSumFusion,
+  TransposeReshapeFusion,
+  ClipByNormNoDivSquareSumFusion,
+  MomentumLossscaleFusion,
+  DereluFusion,
+  FusedBatchNormFusion,
+  MatmulEltwiseFusionPass,
+  BatchMatmulFusedMulAddFusionPass,
+  EltwiseFusionPass,
+  MultiOutputFusionPass,
+  BnupdateEltwiseEltwiseFusionPass,
+  BnupdateEltwiseFusionPass,
+  Conv2DBackpropEltwiseFusionPass,
+  ConvBnReduceFusionPass,
+};
+
+// Singleton exposing per-pass enable switches parsed from GE license info.
+class LicManager {
+ public:
+  static LicManager &GetInstance();
+  bool GetPassSwitch(OptPassEnum pass);
+
+ private:
+  void ParseSwitch();
+  void ParseFeSwitch(const std::string &options_str);
+
+  bool init_flag = false;
+  std::map<OptPassEnum, bool> pass_switch_ = {};
+};
+}  // namespace mindspore
+#endif  // MINDSPORE_CCSRC_RUNTIME_DEVICE_ASCEND_LIC_MANAGER_H_
diff --git a/mindspore/run_check/_check_version.py b/mindspore/run_check/_check_version.py
index f1eb67b52c7..c7dfd2c72c8 100644
--- a/mindspore/run_check/_check_version.py
+++ b/mindspore/run_check/_check_version.py
@@ -207,7 +207,7 @@ class AscendEnvChecker(EnvChecker):
     """ascend environment check"""
 
     def __init__(self):
-        self.version = ["1.78.23.0.230"]
+        self.version = ["1.78.23.1.230"]
         atlas_nnae_version = "/usr/local/Ascend/nnae/latest/fwkacllib/version.info"
         atlas_toolkit_version = "/usr/local/Ascend/ascend-toolkit/latest/fwkacllib/version.info"
         hisi_fwk_version = "/usr/local/Ascend/fwkacllib/version.info"
diff --git a/requirements.txt b/requirements.txt
index 967b6fcaad7..8607c2a7f9f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
 numpy >= 1.17.0
-protobuf >= 3.8.0
+protobuf >= 3.13.0
 asttokens >= 1.1.13
 pillow >= 6.2.0
 scipy >= 1.5.2
@@ -17,4 +17,4 @@ packaging >= 20.0
 pycocotools >= 2.0.2        # for st test
 tables >= 3.6.1             # for st test
 easydict >= 1.9             # for st test
-psutil >= 5.7.0
\ No newline at end of file
+psutil >= 5.7.0
diff --git a/setup.py b/setup.py
index 24058b2f189..13b4dfbfd2d 100644
--- a/setup.py
+++ b/setup.py
@@ -108,7 +108,7 @@ build_dependencies()
 
 required_package = [
     'numpy >= 1.17.0',
-    'protobuf >= 3.8.0',
+    'protobuf >= 3.13.0',
     'asttokens >= 1.1.13',
     'pillow >= 6.2.0',
     'scipy >= 1.5.2',
diff --git a/tests/st/data_transfer/test_tdt_data_transfer.py b/tests/st/data_transfer/test_tdt_data_transfer.py
index 8b50908df74..de65d06e0c9 100644
--- a/tests/st/data_transfer/test_tdt_data_transfer.py
+++ b/tests/st/data_transfer/test_tdt_data_transfer.py
@@ -126,7 +126,7 @@ def op_network_with_step_num(dataset, step_num):
     return op_network_with_epoch(net_with_dataset, step_num)
 
 
-@pytest.mark.level0
+@pytest.mark.level1
 @pytest.mark.platform_arm_ascend_training
 @pytest.mark.platform_x86_ascend_training
 @pytest.mark.env_onecard
diff --git a/tests/st/explainer/explanation/_attribution/_backprop/test_modified_relu.py b/tests/st/explainer/explanation/_attribution/_backprop/test_modified_relu.py
index f365bc32d4b..8795b87ce61 100644
--- a/tests/st/explainer/explanation/_attribution/_backprop/test_modified_relu.py
+++ b/tests/st/explainer/explanation/_attribution/_backprop/test_modified_relu.py
@@ -51,7 +51,7 @@ class TestModifiedReLU:
         self.abs_ = P.Abs()
         self.reshape = P.Reshape()
 
-    @pytest.mark.level0
+    @pytest.mark.level1
     @pytest.mark.platform_arm_ascend_training
     @pytest.mark.platform_x86_ascend_training
     @pytest.mark.env_onecard
diff --git a/tests/ut/cpp/CMakeLists.txt b/tests/ut/cpp/CMakeLists.txt
index 6022e5754b6..cc48d88e814 100644
--- a/tests/ut/cpp/CMakeLists.txt
+++ b/tests/ut/cpp/CMakeLists.txt
@@ -124,6 +124,7 @@ file(GLOB_RECURSE MINDSPORE_SRC_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}
         "../../../mindspore/ccsrc/runtime/device/ascend/ascend_memory_manager.cc"
         "../../../mindspore/ccsrc/runtime/device/ascend/ascend_device_address.cc"
         "../../../mindspore/ccsrc/runtime/device/ascend/ascend_memory_pool.cc"
+        "../../../mindspore/ccsrc/runtime/device/ascend/lic_manager.cc"
         "../../../mindspore/ccsrc/backend/kernel_compiler/cpu/cpu_kernel.cc"
         "../../../mindspore/ccsrc/backend/kernel_compiler/cpu/cpu_kernel_factory.cc"
         "../../../mindspore/ccsrc/backend/kernel_compiler/cpu/sparse_apply_adam_cpu_kernel.cc"
diff --git a/tests/ut/cpp/stub/ge/ge_mock.cc b/tests/ut/cpp/stub/ge/ge_mock.cc
index e206f8bc930..ed32606bb32 100644
--- a/tests/ut/cpp/stub/ge/ge_mock.cc
+++ b/tests/ut/cpp/stub/ge/ge_mock.cc
@@ -51,4 +51,11 @@ Status GEFinalize() { return ge::GRAPH_SUCCESS; }
 Status Graph::SaveToFile(const string& file_name) const { return ge::GRAPH_SUCCESS; }
 
 }  // namespace ge
+
+namespace gelc {
+// UT stub: reports success and leaves opt_info_map empty; params unused.
+uint32_t GetOptInfo(uint32_t mode, const std::string &soc_ver, std::map<std::string, std::string> &opt_info_map) {
+  return 0;
+}
+}  // namespace gelc
 #endif