!48494 Fix MSVC static variable issue.

Merge pull request !48494 from 张清华/opt_jit_fallback
i-robot 2023-02-07 06:59:38 +00:00 committed by Gitee
commit 8e80753161
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
10 changed files with 48 additions and 25 deletions
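The core of this change is moving the static initializer objects (`PyExceptionInitializer`, `PyExecuteInitializer`) out of .cc files and into headers that pipeline.cc includes exactly once. The standalone sketch below illustrates the general register-via-static-object pattern involved; the handler plumbing and the comment about the linker behaviour are illustrative assumptions, not taken from the commit.

```cpp
// Single-file sketch of a handler registered by the constructor of a
// file-scope static object (hypothetical names, not the MindSpore API).
#include <functional>
#include <iostream>
#include <string>

// Process-wide hook that core C++ code calls when a Python-side handler exists.
std::function<void(const std::string &)> g_py_exception_handler;

// In the commit this kind of class lives in log_adapter_py.h / py_execute_py.h,
// and the header carries a NOTICE that it must be included exactly once: the
// static object's constructor performs the registration. Keeping the definition
// in a header pulled in by pipeline.cc sidesteps the MSVC problem the commit
// message refers to (presumably the static object being dropped or never
// initialized when it lived in its own .cc).
class PyExceptionInitializerSketch {
 public:
  PyExceptionInitializerSketch() {
    g_py_exception_handler = [](const std::string &msg) { std::cerr << "py exception: " << msg << "\n"; };
  }
};
static PyExceptionInitializerSketch py_exception_initializer_sketch;

int main() {
  if (g_py_exception_handler) {
    g_py_exception_handler("demo");  // the handler installed at static-init time
  }
  return 0;
}
```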

View File

@ -20,7 +20,7 @@ mindspore/mindspore/ccsrc/pipeline/jit/parse/resolve.cc:mindspore::parse::Resolv
mindspore/mindspore/ccsrc/pipeline/jit/pipeline.cc:mindspore::pipeline::GraphExecutorPy::Compile
mindspore/mindspore/ccsrc/pipeline/jit/static_analysis/prim.cc:mindspore::abstract::ConvertAbstractToPython
mindspore/mindspore/ccsrc/pybind_api/ir/log_adapter_py.h:mindspore::PyExceptionInitializer::HandleExceptionPy
- mindspore/mindspore/ccsrc/pybind_api/ir/py_execute_py.cc:mindspore::PyExecuteInitializer::CppInferShapeAndTypePy
+ mindspore/mindspore/ccsrc/pybind_api/ir/py_execute_py.h:mindspore::PyExecuteInitializer::CppInferShapeAndTypePy
mindspore/mindspore/ccsrc/plugin/device/gpu/kernel/math/unary_op_gpu_kernel.h:mindspore::kernel::UnaryOpGpuKernel::Launch
mindspore/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fission/dynamic_rnn_grad_fission_v2.cc:mindspore::opt::AddLSTMInputGradNode
mindspore/mindspore/ccsrc/plugin/device/ascend/kernel/aicpu/aicpu_ops/drop_out_gen_mask_kernels.cc:aicpu::ARMDropOutGenMaskKernel

View File

@ -296,7 +296,13 @@ void InferShape(const CNodePtr &cnode, std::map<uint32_t, tensor::TensorPtr> *de
opt::CppInferShape(primitive, args_spec_list, cnode);
} else {
if (cpp_infer_py_handler_ == nullptr) {
MS_LOG(EXCEPTION) << "\'cpp_infer_py_handler_\' should not be null.";
// If run without Python.
MS_LOG(WARNING) << "\'cpp_infer_py_handler_\' should not be null.";
const auto &abs = opt::CppInferShapeAndType(primitive, args_spec_list);
MS_LOG(DEBUG) << "The abstract of " << cnode->fullname_with_scope() << " changes from " << cnode->abstract()
<< " to " << abs;
cnode->set_abstract(abs);
return;
}
const auto &abs = cpp_infer_py_handler_(cnode, primitive, args_spec_list);
cnode->set_abstract(abs);
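A minimal standalone sketch of the fallback added above, with hypothetical types (a std::string stands in for the abstract): if the Python-side infer handler was never registered, for example when running without Python, warn and fall back to the pure C++ shape/type inference instead of throwing.

```cpp
#include <functional>
#include <iostream>
#include <string>

using Abstract = std::string;  // stand-in for the node's inferred abstract

// Set only when the Python side has registered its handler.
std::function<Abstract(const std::string &)> cpp_infer_py_handler;

Abstract CppInferShapeAndType(const std::string &op) { return "cpp-abstract-of-" + op; }

Abstract InferShape(const std::string &op) {
  if (cpp_infer_py_handler == nullptr) {
    // If run without Python: warn (instead of raising) and use the C++ path.
    std::cerr << "warning: 'cpp_infer_py_handler' should not be null\n";
    return CppInferShapeAndType(op);
  }
  return cpp_infer_py_handler(op);
}

int main() {
  std::cout << InferShape("PyExecute") << "\n";  // C++ fallback path
  cpp_infer_py_handler = [](const std::string &op) { return "py-abstract-of-" + op; };
  std::cout << InferShape("PyExecute") << "\n";  // Python handler path
  return 0;
}
```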

View File

@ -1845,8 +1845,10 @@ void SetCpuRefMapToKernelInfo(const CNodePtr &apply_kernel, const std::vector<Ke
auto [is_match, index] = match_result;
if (!is_match) {
+ constexpr auto recursive_level = 2;
MS_LOG(EXCEPTION) << common::AnfAlgo::GetCNodeName(apply_kernel)
- << " does not support this kernel data type: " << kernel_attr;
+ << " does not support this kernel data type: " << kernel_attr
+ << "\nnode: " << apply_kernel->DebugString(recursive_level);
}
MS_EXCEPTION_IF_NULL(apply_kernel);

View File

@ -74,7 +74,6 @@
#include "kernel/akg/akg_kernel_build_manager.h"
#include "kernel/graph_kernel_info.h"
#include "include/backend/data_queue/data_queue_mgr.h"
#include "pybind_api/ir/log_adapter_py.h"
#ifndef ENABLE_SECURITY
#include "debug/data_dump/dump_json_parser.h"
#endif
@ -91,9 +90,11 @@
#include "include/common/debug/rdr/recorder_manager.h"
#include "ir/cell.h"
#endif
#include "pybind_api/utils/stub_tensor_py.h"
#include "pybind_api/ir/log_adapter_py.h" // Only include one-time in the whole project.
#include "pybind_api/ir/py_execute_py.h" // Only include one-time in the whole project.
namespace mindspore {
// namespace to support intermediate representation definition
namespace pipeline {

View File

@ -308,10 +308,10 @@ std::pair<std::string, ExceptionType> KernelNotSupportWarning(const CNodePtr &ke
operator_info << "Operator[" << kernel_name << "] ";
size_t input_num = common::AnfAlgo::GetInputTensorNum(kernel_node);
if (input_num > 0) {
operator_info << " input(";
operator_info << "input(";
for (size_t i = 0; i < input_num; ++i) {
- operator_info << TypeIdLabel(input_types[i]);
- operator_info << "(" << TypeIdLabel(input_object_types[i]) << ")";
+ operator_info << TypeIdLabel(input_object_types[i]);
+ operator_info << "(" << TypeIdLabel(input_types[i]) << ")";
if (i != input_num - 1) {
operator_info << ",";
}
@ -322,8 +322,8 @@ std::pair<std::string, ExceptionType> KernelNotSupportWarning(const CNodePtr &ke
if (output_num > 0) {
operator_info << "output(";
for (size_t i = 0; i < output_num; ++i) {
- operator_info << TypeIdLabel(infer_output_types[i]);
- operator_info << "(" << TypeIdLabel(output_object_types[i]) << ")";
+ operator_info << TypeIdLabel(output_object_types[i]);
+ operator_info << "(" << TypeIdLabel(infer_output_types[i]) << ")";
if (i != output_num - 1) {
operator_info << ",";
}
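For reference, a standalone sketch of the message this helper builds after the swap above, where (per the hunk) the object type is printed first and the element dtype goes in parentheses; the type labels here are placeholders, not MindSpore's TypeIdLabel output.

```cpp
#include <iostream>
#include <sstream>
#include <string>
#include <utility>
#include <vector>

// Each item is {object_type, dtype}; printed as object_type(dtype).
using TypeInfo = std::vector<std::pair<std::string, std::string>>;

std::string BuildOperatorInfo(const std::string &kernel_name, const TypeInfo &inputs, const TypeInfo &outputs) {
  std::ostringstream operator_info;
  operator_info << "Operator[" << kernel_name << "] ";
  auto append = [&operator_info](const char *tag, const TypeInfo &items) {
    if (items.empty()) {
      return;
    }
    operator_info << tag << "(";
    for (size_t i = 0; i < items.size(); ++i) {
      operator_info << items[i].first << "(" << items[i].second << ")";
      if (i + 1 < items.size()) {
        operator_info << ",";
      }
    }
    operator_info << ") ";
  };
  append("input", inputs);
  append("output", outputs);
  return operator_info.str();
}

int main() {
  // Prints: Operator[MatMul] input(Tensor(Float32),Tensor(Float32)) output(Tensor(Float32))
  std::cout << BuildOperatorInfo("MatMul", {{"Tensor", "Float32"}, {"Tensor", "Float32"}},
                                 {{"Tensor", "Float32"}})
            << std::endl;
  return 0;
}
```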

View File

@ -339,7 +339,8 @@ void CPUKernelExecutor::SetOperatorInfo(const KernelGraphPtr &graph) const {
};
auto cnode = graphkernel::TryExpandCNode(node, f);
if (cnode == nullptr) {
- MS_EXCEPTION(etype) << msg;
+ constexpr auto recursive_level = 2;
+ MS_EXCEPTION(etype) << msg << "\nnode: " << node->DebugString(recursive_level);
}
(void)mng->Replace(node, cnode);
MS_LOG(INFO) << msg << " but expand success.";
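This hunk and the SetCpuRefMapToKernelInfo hunk above follow the same pattern: append a depth-limited DebugString() of the offending node to the exception text so the log shows what was being compiled. A standalone sketch with a hypothetical node type:

```cpp
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

struct Node {
  std::string name;
  std::vector<const Node *> inputs;
  // Print this node and, up to recursive_level levels deep, its inputs.
  std::string DebugString(int recursive_level) const {
    std::ostringstream oss;
    oss << name;
    if (recursive_level > 0 && !inputs.empty()) {
      oss << "(";
      for (size_t i = 0; i < inputs.size(); ++i) {
        oss << inputs[i]->DebugString(recursive_level - 1);
        if (i + 1 < inputs.size()) {
          oss << ", ";
        }
      }
      oss << ")";
    }
    return oss.str();
  }
};

int main() {
  Node x{"x", {}};
  Node y{"y", {}};
  Node add{"Add", {&x, &y}};
  constexpr int recursive_level = 2;
  std::ostringstream msg;
  msg << "Operator[Add] does not support this kernel data type"
      << "\nnode: " << add.DebugString(recursive_level);
  std::cerr << msg.str() << std::endl;  // what would go into MS_EXCEPTION / MS_LOG(EXCEPTION)
  return 0;
}
```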

View File

@ -289,7 +289,8 @@ void TensorToRawMemory(const tensor::TensorPtr &tensor, const AddressPtr &addres
MS_EXCEPTION_IF_NULL(address);
const auto &res = memcpy_s(address->addr, address->size, tensor->data_c(), tensor->Size());
if (res != EOK) {
MS_LOG(EXCEPTION) << "memcpy failed. res: " << res;
MS_LOG(EXCEPTION) << "memcpy failed. res: " << res << ", dest size: " << address->size
<< ", src size: " << tensor->Size();
}
}
@ -324,10 +325,11 @@ bool PyExecuteCpuKernelMod::Launch(const std::vector<AddressPtr> &inputs, const
// To call the script with global and local parameters.
const auto &global_dict = CallPythonGetGlobalParams();
const auto &py_script = py::str(script);
- auto params = py::tuple(2);
+ constexpr auto number_two = 2;
+ auto params = py::tuple(number_two);
params[0] = global_dict;
params[1] = local_dict;
MS_LOG(DEBUG) << "Python script: " << py_script << ", params: " << params;
MS_LOG(DEBUG) << "Python script: " << py_script << ", local_dict: " << local_dict;
try {
mindspore::ScopedFallbackRunning fallback_running;
const auto &output = CallPythonScript(py_script, params);
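The launch path evaluates the fallback script against an explicit pair of global and local dicts. The sketch below is not MindSpore's CallPythonScript (which routes through mindspore._extends.parse); it is a plain pybind11 embedding example of the same idea, assuming pybind11 is available.

```cpp
#include <pybind11/embed.h>

#include <iostream>

namespace py = pybind11;

int main() {
  py::scoped_interpreter guard{};        // start an embedded Python interpreter
  py::dict global_dict = py::globals();  // stand-in for CallPythonGetGlobalParams()
  py::dict local_dict;                   // stand-in for the kernel's local_dict
  local_dict["x"] = 3;
  local_dict["y"] = 4;
  // Evaluate the script with the (global, local) pair, mirroring params above.
  py::object output = py::eval("x * y + 1", global_dict, local_dict);
  std::cout << output.cast<int>() << std::endl;  // prints 13
  return 0;
}
```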

View File

@ -13,8 +13,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
- #ifndef MINDSPORE_CCSRC_PYBINDAPI_IR_LOGADAPTERPY_H_
- #define MINDSPORE_CCSRC_PYBINDAPI_IR_LOGADAPTERPY_H_
+ // NOTICE: This header file should only be included once in the whole project.
+ // We change the cpp file to header file, to avoid MSVC compiler problem.
+ #ifndef MINDSPORE_CCSRC_PYBINDAPI_IR_LOG_ADAPTER_PY_H_
+ #define MINDSPORE_CCSRC_PYBINDAPI_IR_LOG_ADAPTER_PY_H_
#include "utils/log_adapter.h"
@ -101,4 +104,4 @@ class PyExceptionInitializer {
static PyExceptionInitializer py_exception_initializer;
} // namespace mindspore
- #endif // MINDSPORE_CCSRC_PYBINDAPI_IR_LOGADAPTERPY_H_
+ #endif // MINDSPORE_CCSRC_PYBINDAPI_IR_LOG_ADAPTER_PY_H_

View File

@ -14,6 +14,15 @@
* limitations under the License.
*/
+ // NOTICE: This header file should only be included once in the whole project.
+ // We change the cpp file to header file, to avoid MSVC compiler problem.
+ #ifndef MINDSPORE_CCSRC_PYBINDAPI_IR_PY_EXECUTE_PY_H_
+ #define MINDSPORE_CCSRC_PYBINDAPI_IR_PY_EXECUTE_PY_H_
+ #include <vector>
+ #include <string>
+ #include <memory>
#include "pybind11/pybind11.h"
#include "pybind_api/pybind_patch.h"
@ -28,14 +37,12 @@
namespace py = pybind11;
namespace mindspore {
- namespace {
- py::object CallPythonGetGlobalParams() {
+ static py::object CallPythonGetGlobalParams() {
constexpr auto python_mod_parse = "mindspore._extends.parse"; // The same as PYTHON_MOD_PARSE_MODULE[]
py::module mod = python_adapter::GetPyModule(python_mod_parse);
constexpr auto python_get_dict = "get_global_params";
return python_adapter::CallPyModFn(mod, python_get_dict);
}
- } // namespace
class PyExecuteInitializer {
public:
@ -60,7 +67,8 @@ class PyExecuteInitializer {
const auto &infer_shape = std::make_shared<abstract::Shape>(ShapeVector({1}));
return abstract::MakeAbstract(infer_shape, kFloat64);
}
- const auto &values_tuple_abs = input_args[2];
+ constexpr auto number_two = 2;
+ const auto &values_tuple_abs = input_args[number_two];
const auto &values_tuple = values_tuple_abs->BuildValue();
if (values_tuple == kAnyValue) {
MS_LOG(EXCEPTION) << "Value tuple should not be anyvalue.";
@ -102,10 +110,10 @@ class PyExecuteInitializer {
}
const auto &global_dict = CallPythonGetGlobalParams();
const auto &py_script = py::str(script_str->value());
- auto params = py::tuple(2);
+ auto params = py::tuple(number_two);
params[0] = global_dict;
params[1] = local_dict;
MS_LOG(DEBUG) << "Python script: " << py_script << ", params: " << params;
MS_LOG(DEBUG) << "Python script: " << py_script << ", local_dict: " << local_dict;
try {
mindspore::ScopedFallbackRunning fallback_running;
const auto &output = parse::data_converter::CallPythonScript(py_script, params);
@ -230,3 +238,4 @@ class PyExecuteInitializer {
static PyExecuteInitializer py_execute_initializer;
} // namespace mindspore
+ #endif // MINDSPORE_CCSRC_PYBINDAPI_IR_PY_EXECUTE_PY_H_
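One more detail worth noting in this file: now that CallPythonGetGlobalParams() is defined in a header, it is marked static (replacing the anonymous namespace) so it has internal linkage. A minimal sketch of why that matters, with hypothetical names:

```cpp
#include <iostream>
#include <string>

// Would live in the once-included header: `static` gives the helper internal
// linkage, so even if the header were accidentally pulled into a second
// translation unit there would be no duplicate-symbol error at link time
// (each including .cc would simply get its own private copy).
static std::string GetGlobalParamsSketch() { return "global params"; }

int main() {
  std::cout << GetGlobalParamsSketch() << std::endl;
  return 0;
}
```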

View File

@ -53,8 +53,7 @@ TypePtr PyExecuteInfer::InferType(const PrimitivePtr &primitive, const std::vect
AbstractBasePtr PyExecuteInfer::InferShapeAndType(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
const std::vector<AbstractBasePtr> &input_args) const {
- const auto &abs = infer_handler_(input_args);
- return abs;
+ return InferPy(primitive, input_args);
}
std::set<int64_t> PyExecuteInfer::GetValueDependArgIndices() const { return {-1}; }