!5058 remove backend/session dependency on python

Merge pull request !5058 from xychow/remove-backend-py-dependency-1
mindspore-ci-bot 2020-09-11 10:58:48 +08:00 committed by Gitee
commit 9a5e074c48
32 changed files with 266 additions and 121 deletions

View File

@@ -24,7 +24,6 @@
#include "utils/ms_utils.h"
#include "common/trans.h"
#include "utils/config_manager.h"
#include "utils/base_ref_extends.h"
namespace mindspore {
namespace session {

View File

@@ -20,6 +20,7 @@
#include <set>
#include <string>
#include <list>
#include "base/core_ops.h"
#include "ir/tensor.h"
#include "ir/anf.h"
@@ -33,15 +34,14 @@
#include "runtime/device/kernel_adjust.h"
#include "runtime/device/ascend/ascend_stream_assign.h"
#include "backend/session/anf_runtime_algorithm.h"
#include "debug/anf_ir_dump.h"
#include "debug/dump_proto.h"
#include "utils/ms_utils.h"
#include "backend/optimizer/common/helper.h"
#include "runtime/device/kernel_runtime_manager.h"
#include "utils/config_manager.h"
#include "utils/base_ref_extends.h"
#include "debug/tensor_load.h"
#include "backend/optimizer/graph_kernel/basic_ops_fusion.h"
#include "debug/anf_ir_dump.h"
#include "debug/dump_proto.h"
namespace mindspore {
namespace session {

mindspore/ccsrc/backend/session/ascend_session.h Executable file → Normal file
View File

View File

@@ -17,6 +17,7 @@
#include "runtime/device/kernel_runtime_manager.h"
#include "backend/session/executor_manager.h"
#include "utils/comm_manager.h"
#include "utils/scoped_long_running.h"
namespace mindspore {
namespace session {
@@ -223,7 +224,7 @@ void Executor::RunGraphAsync(const SessionPtr &session, const GraphId &graph_id,
std::unique_lock<std::mutex> lock(task_mutex_);
ready_tasks_.push(task);
task_cond_var_.notify_all();
py::gil_scoped_release release;
mindspore::ScopedLongRunning long_running;
sync_cond_var_.wait(lock);
CheckException();
}

View File

@@ -13,8 +13,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "debug/dump_proto.h"
#include "backend/session/gpu_session.h"
#include "runtime/device/gpu/kernel_info_setter.h"
#include "runtime/device/gpu/gpu_kernel_build.h"
#include "runtime/device/gpu/gpu_kernel_runtime.h"
@@ -47,8 +47,8 @@
#include "utils/ms_utils.h"
#include "common/trans.h"
#include "utils/ms_context.h"
#include "utils/base_ref_extends.h"
#include "debug/tensor_load.h"
#include "debug/dump_proto.h"
namespace mindspore {
namespace session {

View File

@@ -17,6 +17,8 @@
#include "backend/session/infer_session.h"
#include <memory>
#include <algorithm>
#include <fstream>
#include "include/inference.h"
#include "utils/load_onnx/anf_converter.h"
#include "backend/session/session_basic.h"
@@ -27,6 +29,8 @@
#include "utils/context/context_extends.h"
#include "runtime/device/kernel_runtime_manager.h"
#include "pybind11/pybind11.h"
#ifdef ENABLE_D
#include "utils/ms_context.h"
#endif

View File

@@ -17,8 +17,8 @@
#include <utility>
#include <algorithm>
#include <unordered_map>
#include "c_ops/primitive_c.h"
#include "pipeline/jit/parse/data_converter.h"
#include "ir/manager.h"
#include "ir/param_info.h"
#include "backend/kernel_compiler/common_utils.h"
@@ -1286,7 +1286,7 @@ std::shared_ptr<KernelGraph> SessionBasic::ConstructSingleOpGraph(const OpRunInf
auto graph = std::make_shared<KernelGraph>();
std::vector<AnfNodePtr> inputs;
// set input[0]
PrimitivePtr op_prim = op_run_info.py_primitive;
PrimitivePtr op_prim = op_run_info.primitive;
MS_EXCEPTION_IF_NULL(op_prim);
inputs.push_back(std::make_shared<ValueNode>(op_prim));
// set input parameter

View File

@@ -22,7 +22,7 @@
#include <utility>
#include <memory>
#include <map>
#include "utils/base_ref_extends.h"
#include "backend/session/session_context.h"
#include "backend/session/kernel_graph.h"
#include "backend/session/anf_runtime_algorithm.h"
@@ -30,7 +30,6 @@
#include "ir/tensor.h"
#include "utils/any.h"
#include "utils/contract.h"
#include "pipeline/pynative/pynative_execute.h"
#include "runtime/device/kernel_info.h"
#include "utils/ms_context.h"
#ifdef ENABLE_DEBUGGER
@@ -47,7 +46,12 @@ using CallBackFunc = uint32_t (*)(uint32_t graph_id,
using AnyList = std::vector<Any>;
using AnyListPtr = std::shared_ptr<AnyList>;
using OpRunInfo = pynative::OpExecInfo;
struct OpRunInfo {
std::string op_name;
PrimitivePtr primitive;
AbstractBasePtr abstract;
ValuePtr value = nullptr;
};
using OpRunInfoPtr = std::shared_ptr<OpRunInfo>;
class Executor;
class SessionBasic : public std::enable_shared_from_this<SessionBasic> {

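The plain OpRunInfo struct above is what lets backend/session stop including pipeline/pynative: the session layer no longer depends on pynative::OpExecInfo, which is defined in the Python-facing pynative code. A minimal sketch of how a frontend caller now fills the struct, mirroring the pynative_execute.cc change later in this diff (op_exec_info is the existing pynative::OpExecInfoPtr):

  session::OpRunInfo op_run_info = {op_exec_info->op_name, op_exec_info->py_primitive,
                                    op_exec_info->abstract, op_exec_info->value};
  session->BuildOpAsync(&op_run_info, graph_info, input_tensors, tensors_mask);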
View File

@@ -23,7 +23,7 @@
#include <string>
#include "ir/tensor.h"
#include "pipeline/jit/resource.h"
#include "pipeline/jit/resource_base.h"
#include "utils/ms_context.h"
namespace mindspore {
namespace session {

View File

@@ -15,17 +15,19 @@
*/
#include "debug/draw.h"
#include <algorithm>
#include <iostream>
#include <iterator>
#include <vector>
#include <string>
#include <algorithm>
#include "ir/meta_func_graph.h"
#include "ir/param_info.h"
#include "ir/primitive.h"
#include "ir/graph_utils.h"
#include "utils/utils.h"
#include "frontend/operator/composite/composite.h"
#include "pipeline/jit/parse/resolve.h"
#include "ir/tensor.h"
namespace mindspore {

View File

@@ -22,13 +22,10 @@
#include <vector>
#include "ir/anf.h"
#include "utils/any.h"
#include "pipeline/jit/parse/resolve.h"
namespace mindspore {
namespace draw {
namespace parse = mindspore::parse;
class Graphviz {
public:
Graphviz(const std::string &name, const std::string &filename) : name_(name), filename_(filename), fout_(filename_) {}

View File

@@ -37,6 +37,9 @@
#include "frontend/parallel/ps/util.h"
#endif
#include "frontend/parallel/ps/ps_context.h"
#include "pybind_api/gil_scoped_long_running.h"
namespace py = pybind11;
using EnvInstance = mindspore::EnvInstance;
@@ -61,6 +64,8 @@ PYBIND11_MODULE(_c_expression, m) {
item.second(&m);
}
mindspore::ScopedLongRunning::SetHook(std::make_unique<mindspore::GilScopedLongRunningHook>());
// Class Pipeline interface
(void)py::class_<ExecutorPy, std::shared_ptr<ExecutorPy>>(m, "Executor_")
.def_static("get_instance", &ExecutorPy::GetInstance, "Executor get_instance.")

View File

@@ -41,6 +41,7 @@
#include "runtime/device/kernel_runtime_manager.h"
#include "backend/session/executor_manager.h"
#include "debug/trace.h"
#include "debug/draw.h"
#include "pipeline/pynative/pynative_execute.h"
#include "frontend/optimizer/py_pass_manager.h"
#include "pybind_api/pybind_patch.h"

View File

@@ -25,8 +25,8 @@
#include <map>
#include <mutex>
#include "utils/base_ref_extends.h"
#include "debug/draw.h"
#include "pybind11/pybind11.h"
#include "ir/anf.h"
#include "ir/tensor.h"
#include "pipeline/jit/action.h"

View File

@@ -29,6 +29,8 @@
#include "utils/any.h"
#include "utils/profile.h"
#include "ir/manager.h"
#include "pipeline/jit/resource_base.h"
#include "pipeline/jit/static_analysis/prim.h"
#include "pipeline/jit/static_analysis/static_analysis.h"
@@ -50,37 +52,6 @@ BuiltInTypeMap &GetMethodMap();
BuiltInTypeMap &GetAttrMap();
class ResourceBase {
public:
ResourceBase() { manager_ = MakeManager(); }
virtual ~ResourceBase() = default;
FuncGraphManagerPtr manager() { return manager_; }
// set a manager defined outside which will not manage the graphs.
void set_manager(const FuncGraphManagerPtr &manager) { manager_ = manager; }
std::unordered_map<std::string, Any> &results() { return results_; }
void SetResult(const std::string &key, const Any &value) { results_[key] = value; }
Any GetResult(const std::string &key) {
if (results_.count(key) == 0) {
MS_LOG(EXCEPTION) << "this key is not in resource list:" << key;
}
return results_[key];
}
bool HasResult(const std::string &key) const { return results_.count(key) != 0; }
std::unordered_map<std::string, Any> results_;
protected:
FuncGraphManagerPtr manager_;
};
using ResourceBasePtr = std::shared_ptr<pipeline::ResourceBase>;
class Resource : public ResourceBase {
public:
explicit Resource(const py::object &obj = py::none());

View File

@@ -0,0 +1,66 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CCSRC_PIPELINE_JIT_RESOURCE_BASE_H_
#define MINDSPORE_CCSRC_PIPELINE_JIT_RESOURCE_BASE_H_
#include <iostream>
#include <vector>
#include <string>
#include <unordered_map>
#include <memory>
#include "utils/any.h"
#include "ir/manager.h"
namespace mindspore {
namespace pipeline {
class ResourceBase {
public:
ResourceBase() { manager_ = MakeManager(); }
virtual ~ResourceBase() = default;
FuncGraphManagerPtr manager() { return manager_; }
// set a manager defined outside which will not manage the graphs.
void set_manager(const FuncGraphManagerPtr &manager) { manager_ = manager; }
std::unordered_map<std::string, Any> &results() { return results_; }
void SetResult(const std::string &key, const Any &value) { results_[key] = value; }
Any GetResult(const std::string &key) {
if (results_.count(key) == 0) {
MS_LOG(EXCEPTION) << "this key is not in resource list:" << key;
}
return results_[key];
}
bool HasResult(const std::string &key) const { return results_.count(key) != 0; }
std::unordered_map<std::string, Any> results_;
protected:
FuncGraphManagerPtr manager_;
};
using ResourceBasePtr = std::shared_ptr<pipeline::ResourceBase>;
} // namespace pipeline
} // namespace mindspore
#endif // MINDSPORE_CCSRC_PIPELINE_JIT_RESOURCE_BASE_H_

View File

@@ -34,7 +34,6 @@
#include "utils/context/context_extends.h"
#include "utils/config_manager.h"
#include "utils/convert_utils_py.h"
#include "utils/base_ref_extends.h"
#include "frontend/operator/ops.h"
#include "frontend/operator/composite/composite.h"
#include "frontend/operator/composite/do_signature.h"
@@ -605,21 +604,13 @@ py::object RunOpInMs(const OpExecInfoPtr &op_exec_info, PynativeStatusCode *stat
ConstructInputTensor(op_exec_info, &tensors_mask, &input_tensors);
// get graph info for checking it whether existing in the cache
std::string graph_info = GetSingleOpGraphInfo(op_exec_info, input_tensors);
session->BuildOpAsync(op_exec_info.get(), graph_info, input_tensors, tensors_mask);
session::OpRunInfo op_run_info = {op_exec_info->op_name, op_exec_info->py_primitive, op_exec_info->abstract,
op_exec_info->value};
session->BuildOpAsync(&op_run_info, graph_info, input_tensors, tensors_mask);
EraseValueNodeTensor(tensors_mask, &input_tensors);
VectorRef outputs;
session->RunOpAsync(op_exec_info.get(), graph_info, input_tensors, &outputs);
// Trans output to tuple
auto output_tensors = TransformBaseRefListToTuple(outputs);
if (!utils::isa<PyObjectRef>(output_tensors) ||
!py::isinstance<py::tuple>(utils::cast<PyObjectRef>(output_tensors).object_)) {
MS_EXCEPTION(NotSupportError) << "The output tensors should be a tuple !";
}
py::object tuple_obj = utils::cast<PyObjectRef>(output_tensors).object_;
py::tuple result = py::cast<py::tuple>(tuple_obj);
session->RunOpAsync(&op_run_info, graph_info, input_tensors, &outputs);
auto result = BaseRefToPyData(outputs);
ms_context->set_param<bool>(MS_CTX_ENABLE_PYNATIVE_INFER, false);
*status = PYNATIVE_SUCCESS;
MS_LOG(INFO) << "End run op[" << op_exec_info->op_name << "] with backend policy ms";

View File

@@ -29,6 +29,7 @@
#include "pybind11/pybind11.h"
#include "pybind11/numpy.h"
#include "pybind_api/ir/base_ref_py.h"
#include "pipeline/pynative/base.h"
#include "utils/ms_context.h"
#include "ir/anf.h"

View File

@@ -0,0 +1,38 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef PYBIND_API_GIL_SCOPED_LONG_RUNNING_H_
#define PYBIND_API_GIL_SCOPED_LONG_RUNNING_H_
#include <memory>
#include "pybind11/pybind11.h"
#include "utils/scoped_long_running.h"
namespace py = pybind11;
namespace mindspore {
class GilScopedLongRunningHook : public ScopedLongRunningHook {
public:
void Enter() override { release_ = std::make_unique<py::gil_scoped_release>(); }
void Leave() override { release_ = nullptr; }
private:
std::unique_ptr<py::gil_scoped_release> release_;
};
} // namespace mindspore
#endif // PYBIND_API_GIL_SCOPED_LONG_RUNNING_H_
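This hook is the pybind11 side of the new indirection: the backend only sees the abstract ScopedLongRunningHook (see utils/scoped_long_running.h added below), while the Python layer installs this concrete GIL-releasing implementation once at module initialization, as done in init.cc in this same diff:

  mindspore::ScopedLongRunning::SetHook(std::make_unique<mindspore::GilScopedLongRunningHook>());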

View File

@@ -14,7 +14,7 @@
* limitations under the License.
*/
#include "utils/base_ref_extends.h"
#include "pybind_api/ir/base_ref_py.h"
namespace mindspore {
bool PyObjectRef::operator==(const BaseRef &other) const {

View File

@@ -13,12 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CCSRC_UTILS_BASE_REF_PY_H_
#define MINDSPORE_CCSRC_UTILS_BASE_REF_PY_H_
#ifndef MINDSPORE_CCSRC_PYBIND_API_IR_BASE_REF_PY_H_
#define MINDSPORE_CCSRC_PYBIND_API_IR_BASE_REF_PY_H_
#include <memory>
#include <string>
#include <utility>
#include "pybind11/pybind11.h"
#include "base/base_ref.h"
namespace py = pybind11;
@@ -26,6 +29,24 @@ namespace mindspore {
class PyObjectRef;
inline std::shared_ptr<PyObjectRef> MakeNode(const py::object &a) { return std::make_shared<PyObjectRef>(a); }
inline std::shared_ptr<PyObjectRef> MakeNode(const py::tuple &a) { return std::make_shared<PyObjectRef>(a); }
class PyObjectRef : public BaseRef {
public:
explicit PyObjectRef(const py::object &py_object) : BaseRef(), object_(py_object) {}
explicit PyObjectRef(const py::tuple &tuple_obj) : BaseRef(), object_(tuple_obj) {}
~PyObjectRef() override = default;
std::shared_ptr<Base> copy() const override { return std::make_shared<PyObjectRef>(object_); }
MS_DECLARE_PARENT(PyObjectRef, BaseRef)
uint32_t type() const override { return tid(); }
std::string ToString() const override { return py::str(object_); }
bool operator==(const BaseRef &other) const override;
bool operator==(const PyObjectRef &other) const;
py::object object_;
};
} // namespace mindspore
#endif // MINDSPORE_CCSRC_UTILS_BASE_REF_PY_H_
#endif // MINDSPORE_CCSRC_PYBIND_API_IR_BASE_REF_PY_H_

View File

@@ -23,7 +23,6 @@
#include "utils/convert_utils_base.h"
#include "utils/convert_utils_py.h"
#include "utils/primitive_utils.h"
#include "utils/base_ref_extends.h"
#include "utils/ms_context.h"
#include "pybind_api/api_register.h"
#include "pybind_api/export_flags.h"

View File

@@ -1,45 +0,0 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CCSRC_UTILS_BASE_REF_EXTENDS_H_
#define MINDSPORE_CCSRC_UTILS_BASE_REF_EXTENDS_H_
#include <memory>
#include <string>
#include "pybind_api/ir/base_ref_py.h"
#include "base/base_ref.h"
namespace mindspore {
class PyObjectRef : public BaseRef {
public:
explicit PyObjectRef(const py::object &py_object) : object_(py_object) {}
explicit PyObjectRef(const py::tuple &tuple_obj) : object_(tuple_obj) {}
~PyObjectRef() override = default;
std::shared_ptr<Base> copy() const override { return std::make_shared<PyObjectRef>(object_); }
MS_DECLARE_PARENT(PyObjectRef, BaseRef)
uint32_t type() const override { return tid(); }
std::string ToString() const override { return py::str(object_); }
bool operator==(const BaseRef &other) const override;
bool operator==(const PyObjectRef &other) const;
py::object object_;
};
} // namespace mindspore
#endif // MINDSPORE_CCSRC_UTILS_BASE_REF_EXTENDS_H_

View File

@@ -22,7 +22,8 @@
#include "pybind11/pybind11.h"
#include "utils/convert_utils_base.h"
#include "utils/any.h"
#include "utils/base_ref_extends.h"
#include "base/base_ref.h"
#include "base/base.h"
#include "ir/anf.h"
namespace py = pybind11;

View File

@@ -13,15 +13,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "utils/load_onnx/anf_converter.h"
#include <fcntl.h>
#include <fstream>
#include <memory>
#include <vector>
#include <string>
#include "utils/base_ref_extends.h"
#include "pybind11/pybind11.h"
#include "utils/load_onnx/anf_model_parser.h"
#include "utils/load_onnx/anf_converter.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "proto/onnx.pb.h"
#include "utils/log_adapter.h"

View File

@@ -21,8 +21,8 @@
#include "pipeline/jit/parse/python_adapter.h"
#include "utils/log_adapter.h"
#include "utils/ms_utils.h"
#include "utils/base_ref_extends.h"
#include "utils/convert_utils_py.h"
#include "pybind_api/ir/base_ref_py.h"
namespace mindspore {
py::function GetBpropFunctionByObj(py::object obj) {

View File

@@ -0,0 +1,20 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "utils/scoped_long_running.h"
namespace mindspore {
ScopedLongRunningHookPtr ScopedLongRunning::hook_;
} // namespace mindspore

View File

@@ -0,0 +1,61 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CCSRC_UTILS_SCOPED_LONG_RUNNING_H_
#define MINDSPORE_CCSRC_UTILS_SCOPED_LONG_RUNNING_H_
#include <memory>
#include <utility>
namespace mindspore {
// Base Class for scoped long running code.
// Enter() should release some global resource, like Python GIL;
// Leave() should acquire the same global resource released.
class ScopedLongRunningHook {
public:
ScopedLongRunningHook() = default;
virtual ~ScopedLongRunningHook() = default;
virtual void Enter() = 0;
virtual void Leave() = 0;
};
using ScopedLongRunningHookPtr = std::unique_ptr<ScopedLongRunningHook>;
// Before calling into long-running code, construct this RAII class to release global resource
// like Python GIL.
class ScopedLongRunning {
public:
ScopedLongRunning() {
if (hook_ != nullptr) {
hook_->Enter();
}
}
~ScopedLongRunning() {
if (hook_ != nullptr) {
hook_->Leave();
}
}
static void SetHook(ScopedLongRunningHookPtr hook) {
if (hook_ == nullptr) {
hook_ = std::move(hook);
}
}
private:
static ScopedLongRunningHookPtr hook_;
};
} // namespace mindspore
#endif // MINDSPORE_CCSRC_UTILS_SCOPED_LONG_RUNNING_H_
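With the hook installed, backend code that used to create a py::gil_scoped_release directly (and therefore had to include pybind11) now just constructs the RAII guard. A short sketch of the pattern as it appears in Executor::RunGraphAsync in this diff:

  std::unique_lock<std::mutex> lock(task_mutex_);
  ready_tasks_.push(task);
  task_cond_var_.notify_all();
  // Releases the Python GIL through the installed hook (no-op if no hook was set).
  mindspore::ScopedLongRunning long_running;
  sync_cond_var_.wait(lock);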

View File

@@ -21,9 +21,10 @@
#include "utils/log_adapter.h"
#include "ir/anf.h"
#include "utils/callbacks.h"
#include "utils/base_ref_extends.h"
#include "utils/convert_utils.h"
#include "backend/session/session_factory.h"
#include "utils/ms_utils.h"
#include "pybind_api/ir/base_ref_py.h"
#ifdef ENABLE_GE
#include "utils/callbacks_ge.h"
#endif

View File

@@ -21,7 +21,7 @@
#include "vm/vmimpl.h"
#include "vm/backend.h"
#include "pipeline/jit/parse/data_converter.h"
#include "utils/base_ref_extends.h"
#include "pybind_api/ir/base_ref_py.h"
namespace mindspore {
namespace compile {

View File

@@ -29,8 +29,12 @@
#include <deque>
#include <unordered_map>
#include "pybind11/pybind11.h"
#include "ir/anf.h"
#include "utils/base_ref_extends.h"
#include "base/base_ref.h"
namespace py = pybind11;
namespace mindspore {
namespace compile {

View File

@@ -27,6 +27,7 @@
#include "ir/anf.h"
#include "ir/manager.h"
#include "ir/tensor.h"
#include "pybind_api/ir/base_ref_py.h"
namespace mindspore {
namespace compile {