!315 modify init_ge api name and add init_ge

Merge pull request !315 from jinyaohui/init_dataset
mindspore-ci-bot 2020-04-27 11:47:49 +08:00 committed by Gitee
commit e42f7601b6
8 changed files with 36 additions and 13 deletions

@@ -1 +1 @@
-Subproject commit 0c33e9d12562953ca4bd6c03cb77da2c2da74acd
+Subproject commit 43f5d24337bf785251eefae2d810c7d5684194d6

@@ -97,7 +97,7 @@ PYBIND11_MODULE(_c_expression, m) {
               py::arg("batch_size"), py::arg("types"), py::arg("shapes"), py::arg("input_indexs"),
               py::arg("phase") = py::str("dataset"), "Init and exec dataset.");
   (void)m.def("_set_dataset_mode_config", &mindspore::ConfigManager::SetDatasetModeConfig, "API for set dataset mode.");
-  (void)m.def("init_ge", &mindspore::pipeline::InitGe, "Init GE");
+  (void)m.def("init_backend", &mindspore::pipeline::InitBackend, "Init Backend.");
   (void)m.def("export_graph", &mindspore::pipeline::ExportGraph, "Export Graph.");

@@ -236,7 +236,7 @@ py::dict ExecutorPy::GetAllreduceFusion(const std::string &phase) {
 void ExecutorPy::DelNetRes(const std::string &id) {
 #ifdef ENABLE_GE
-  FinalizeGe();
+  FinalizeBackend();
 #endif
   if (executor_ != nullptr) {
     bool flag = false;
@@ -680,6 +680,13 @@ bool InitExecDataset(const std::string &queue_name, int64_t iter_num, int64_t ba
                      const std::vector<TypePtr> &types, const std::vector<std::vector<int64_t>> &shapes,
                      const std::vector<int64_t> &input_indexes, const std::string &phase) {
   std::string name = MsContext::GetInstance()->backend_policy();
+#ifndef NO_DLIB
+  auto ms_context = MsContext::GetInstance();
+  MS_EXCEPTION_IF_NULL(ms_context);
+  if (!ms_context->IsTsdOpened() || !ms_context->IsGeInited()) {
+    (void)InitBackend();
+  }
+#endif
   if (name == kMsConvert || name == kMsVm) {
     return InitExecDatasetVm(queue_name, iter_num, batch_size, types, shapes, input_indexes);
   }
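The seven added lines above (guarded by #ifndef NO_DLIB) let InitExecDataset bring the backend up on its own when TSD is not yet opened or GE is not yet initialized, instead of relying on the caller to have done so first. A hedged Python-side sketch of the effect, using the renamed binding from the PYBIND11_MODULE hunk; the remark about reference counting is an assumption based on the tsd_ref_/ge_ref_ counters checked later in this diff:

# Illustration only: an explicit backend init before dataset setup becomes a
# safeguard rather than a requirement, because InitExecDataset() now re-checks
# IsTsdOpened()/IsGeInited() itself in builds where NO_DLIB is not defined.
from mindspore._c_expression import init_backend

init_backend()  # optional; assumed safe to repeat, since TSD/GE appear to be reference-counted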
@@ -758,7 +765,7 @@ void ResetOpId() { mindspore::id_generator::reset_id(); }
 void InitHccl() {
 #ifdef ENABLE_GE
-  (void)InitGe();
+  (void)InitBackend();
 #else
   mindspore::parse::python_adapter::set_python_env_flag(true);
   auto ms_context = MsContext::GetInstance();
@@ -780,7 +787,7 @@ void InitHccl() {
 void FinalizeHccl() {
 #ifdef ENABLE_GE
-  (void)FinalizeGe();
+  (void)FinalizeBackend();
 #else
   device::KernelRuntimeManager::Instance().ClearRuntimeResource();
 #endif
@@ -801,7 +808,7 @@ void ReleaseGeTsd() {
   }
 }
-void InitGe() {
+void InitBackend() {
   // set python env flag
   mindspore::parse::python_adapter::set_python_env_flag(true);
   // open tsd before ge initialize
@@ -813,7 +820,7 @@ void InitGe() {
   (void)ms_context->InitGe();
 }
-void FinalizeGe() {
+void FinalizeBackend() {
   auto context_ptr = MsContext::GetInstance();
   MS_EXCEPTION_IF_NULL(context_ptr);
   (void)context_ptr->FinalizeGe();

@@ -116,8 +116,8 @@ bool InitDistribute(const std::map<std::string, std::string> &options);
 void ResetOpId();
 void InitHccl();
 void FinalizeHccl();
-void InitGe();
-void FinalizeGe();
+void InitBackend();
+void FinalizeBackend();
 void ClearResAtexit();
 void ReleaseGeTsd();

@@ -439,4 +439,18 @@ bool MsContext::PynativeInitGe() {
   is_pynative_ge_init_ = true;
   return true;
 }
+
+bool MsContext::IsTsdOpened() {
+  if (tsd_ref_ > 0) {
+    return true;
+  }
+  return false;
+}
+
+bool MsContext::IsGeInited() {
+  if (ge_ref_ > 0) {
+    return true;
+  }
+  return false;
+}
 }  // namespace mindspore

@@ -82,8 +82,10 @@ class MsContext {
   bool OpenTsd();
   bool CloseTsd(bool force = false);
+  bool IsTsdOpened();
   bool InitGe();
   bool FinalizeGe(bool force = false);
+  bool IsGeInited();
   void set_enable_hccl(bool enable_hccl) { enable_hccl_ = enable_hccl; }
   bool enable_hccl() const { return enable_hccl_; }
   bool PynativeInitGe();

@@ -22,7 +22,7 @@ from mindspore import context
 from mindspore import log as logger
 from mindspore.parallel._utils import _get_parallel_mode
 from .._c_expression import generate_key, Executor_, Tensor, MetaTensor
-from .._c_expression import verify_inputs_signature, init_exec_dataset, _set_dataset_mode_config, init_ge
+from .._c_expression import verify_inputs_signature, init_exec_dataset, _set_dataset_mode_config, init_backend
 from .tensor import Tensor as MsTensor

 # store ms_function class compiled pipeline cache
@@ -184,7 +184,7 @@ class _MindSporeFunction:

     @_wrap_func
     def __call__(self, *args):
-        init_ge()
+        init_backend()
         converted, arguments_dict, parse_method = _convert_function_arguments(self.fn, *args)
         if not converted:
             raise RuntimeError('Process function parameter is failure')

@@ -22,7 +22,7 @@ from ..common import dtype as mstype
 from ..common.api import _executor
 from .._checkparam import _check_str_by_regular
 from ..common.parameter import Parameter, ParameterTuple
-from .._c_expression import init_ge
+from .._c_expression import init_backend
 from ..ops.primitive import Primitive
 from ..parallel._tensor import _load_tensor_by_layout
 from ..parallel._utils import _get_parallel_mode
@@ -66,7 +66,7 @@ class Cell:
         self._phase = 'train'
         self._parameter_layout_dict = {}
         self._create_time = int(time.time() * 1e9)
-        init_ge()
+        init_backend()
         # call gc to release GE session resources used by non-used cell objects
        gc.collect()
         self._construct_inputs_num = 0
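The Python hunks above show the user-visible effect of the rename: backend initialization now goes through init_backend() whenever a Cell is constructed or an ms_function-wrapped function is first called. A minimal, hedged sketch (assumes a working MindSpore build; the Identity network is a made-up placeholder):

import mindspore.nn as nn

class Identity(nn.Cell):
    def __init__(self):
        # Cell.__init__ now calls init_backend() internally (previously init_ge()),
        # so constructing any Cell triggers backend setup if it has not happened yet.
        super(Identity, self).__init__()

    def construct(self, x):
        return x

net = Identity()  # backend (TSD/GE when enabled) is initialized here on demand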