!29570 add julia version check and platform check

Merge pull request !29570 from r1chardf1d0/master
i-robot 2022-01-29 08:47:54 +00:00 committed by Gitee
commit 8f784d40c1
6 changed files with 181 additions and 98 deletions

@@ -32,7 +32,7 @@ void CustomJULIACpuKernelMod::InitKernel(const CNodePtr &kernel_node) {
auto pos1 = exec_info.find(":");
auto pos2 = exec_info.rfind(":");
if (pos1 == std::string::npos || pos2 == std::string::npos || pos1 == pos2) {
MS_LOG(EXCEPTION) << "Wrong execute info:" << exec_info << ", it should be file:module:func";
MS_LOG(EXCEPTION) << "Wrong execute info: " << exec_info << ", it should be file:module:func";
}
auto path = exec_info.substr(0, pos1);
auto real_path = FileUtils::GetRealPath(path.c_str());
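The parse above takes everything before the first ':' as the file path, so none of the three fields may themselves contain a colon. A quick sketch of the same split in Python (the exec_info value is a hypothetical example, and the module/func extraction mirrors what the surrounding code presumably does with pos1/pos2):

import os

exec_info = "add.jl:Add:foo!"  # hypothetical "file:module:func" string
pos1, pos2 = exec_info.find(":"), exec_info.rfind(":")
if pos1 == -1 or pos2 == -1 or pos1 == pos2:
    raise ValueError("Wrong execute info: " + exec_info + ", it should be file:module:func")
path = exec_info[:pos1]                        # ~ exec_info.substr(0, pos1)
module, func = exec_info[pos1 + 1:pos2], exec_info[pos2 + 1:]
real_path = os.path.realpath(path)             # ~ FileUtils::GetRealPath(path.c_str())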
@@ -102,16 +102,15 @@ bool CustomJULIACpuKernelMod::Launch(const std::vector<AddressPtr> &inputs, cons
if (init) {
ret = julia->Run(file_path_, module_name_, func_name_, nparam, params, ndims_, shapes_, type_pointer_list_);
} else {
MS_LOG(EXCEPTION) << "Julia kernel" << file_path_ << ":" << module_name_ << ":" << func_name_ << "init fail.";
MS_LOG(EXCEPTION) << "Julia kernel[" << file_path_ << ":" << module_name_ << ":" << func_name_ << "] init fail.";
}
if (ret) {
MS_LOG(EXCEPTION) << "Julia kernel" << file_path_ << ":" << module_name_ << ":" << func_name_
<< "had a julia inner error.";
MS_LOG(EXCEPTION) << "Julia kernel[" << file_path_ << ":" << module_name_ << ":" << func_name_
<< "] had a julia runtime error.";
}
} catch (const std::exception &e) {
MS_LOG(EXCEPTION) << "CustomJULIA operator failed when running julia func: " << file_path_ << ":" << module_name_
<< ":" << func_name_ << "! "
<< "Error message is " << e.what();
<< ":" << func_name_ << "! ";
}
return true;
}

@@ -104,7 +104,7 @@ class JuliaAPI {
// open julia shared library
handle_ = dlopen(kLibJulia, RTLD_LAZY | RTLD_LOCAL);
if (!handle_) {
MS_LOG(EXCEPTION) << "Julia lib Open Error: " << dlerror();
MS_LOG(EXCEPTION) << dlerror();
return false;
}
#else
@@ -164,6 +164,11 @@ class JuliaAPI {
jl_init__threading_ = nullptr;
jl_apply_array_type_ = nullptr;
jl_ptr_to_array_ = nullptr;
+ jl_typeof_str_ = nullptr;
+ jl_stderr_obj_ = nullptr;
+ jl_current_exception_ = nullptr;
+ jl_ver_major_ = nullptr;
+ jl_ver_minor_ = nullptr;
}
~JuliaAPI() {
#if !defined(_WIN32) && !defined(_WIN64)
@@ -215,6 +220,14 @@ class JuliaAPI {
bool InitJuliaFunc() {
bool suc = true;
#if !defined(_WIN32) && !defined(_WIN64)
+ GET_HOOK(jl_ver_major, int, void);
+ GET_HOOK(jl_ver_minor, int, void);
+ if (!suc) return false;
+ constexpr int SupportedMinor = 6;
+ if (JlVerMajor() < 1 || (JlVerMajor() == 1 && JlVerMinor() < SupportedMinor)) {
+ MS_LOG(WARNING) << "Only julia version >= 1.6 is supported (tested with julia 1.6).";
+ return false;
+ }
GET_HOOK(jl_eval_string, jl_value_t *, const char *);
GET_HOOK(jl_get_global, jl_value_t *, jl_module_t *, jl_sym_t *);
GET_HOOK(jl_symbol, jl_sym_t *, const char *);
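As a cross-check of the version gate above, the same dlopen/dlsym pattern can be sketched with Python's ctypes. A minimal sketch, assuming a Linux host with libjulia.so resolvable via LD_LIBRARY_PATH (jl_ver_major and jl_ver_minor are real symbols exported by libjulia):

import ctypes

# ~ dlopen(kLibJulia, RTLD_LAZY | RTLD_LOCAL); use the full soname/path if needed
julia = ctypes.CDLL("libjulia.so", mode=ctypes.RTLD_LOCAL)
# ~ GET_HOOK(jl_ver_major, int, void) and GET_HOOK(jl_ver_minor, int, void)
julia.jl_ver_major.restype = ctypes.c_int
julia.jl_ver_minor.restype = ctypes.c_int

major, minor = julia.jl_ver_major(), julia.jl_ver_minor()
SUPPORTED_MINOR = 6
if major < 1 or (major == 1 and minor < SUPPORTED_MINOR):
    print("only julia >= 1.6 is supported, found %d.%d" % (major, minor))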
@@ -224,6 +237,9 @@ class JuliaAPI {
GET_HOOK(jl_init__threading, void, void);
GET_HOOK(jl_apply_array_type, jl_value_t *, jl_value_t *, size_t);
GET_HOOK(jl_ptr_to_array, jl_array_t *, jl_value_t *, void *, jl_value_t *, int);
+ GET_HOOK(jl_typeof_str, const char *, jl_value_t *);
+ GET_HOOK(jl_stderr_obj, jl_value_t *, void);
+ GET_HOOK(jl_current_exception, jl_value_t *, void);
#else
suc = false;
#endif
@@ -246,6 +262,18 @@ class JuliaAPI {
// call the julia function
JlCall(jfunc, &args[0], nparam_);
if (JlExceptionOccurred()) {
+ auto errs = JlStdErrObj();
+ if (errs) {
+ JlEvalString("using Main.Base");
+ auto base = reinterpret_cast<jl_module_t *>(JlEvalString("Main.Base"));
+ auto show = JlGetFunction(base, "show");
+ if (show) {
+ std::vector<jl_value_t *> err_args{errs, JlCurrentException()};
+ constexpr int arg_num = 2;
+ JlCall(show, &err_args[0], arg_num);
+ }
+ }
+ MS_LOG(EXCEPTION) << "Julia exception type: " << JlTypeOfStr(JlExceptionOccurred());
return -1;
}
JlAtexitHook(0);
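In julia terms, the new exception branch amounts to evaluating Base.show(stderr, current_exception). A rough ctypes sketch of driving that through the embedding API, under the same hypothetical libjulia.so setup as above (jl_init__threading and jl_eval_string are the symbols this class already hooks):

import ctypes

jl = ctypes.CDLL("libjulia.so", mode=ctypes.RTLD_LOCAL)
jl.jl_init__threading.restype = None
jl.jl_eval_string.restype = ctypes.c_void_p
jl.jl_init__threading()  # start the julia runtime, as JuliaAPI does once per process
# print an exception object to julia's stderr, roughly what
# JlCall(show, {stderr_obj, current_exception}, 2) does above
jl.jl_eval_string(b'Base.show(stderr, ErrorException("demo")); println(stderr)')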
@@ -272,6 +300,16 @@ class JuliaAPI {
return jl_ptr_to_array_(atype, data, dims, own_buffer);
}
+ const char *JlTypeOfStr(jl_value_t *v) { return jl_typeof_str_(v); }
+ jl_value_t *JlStdErrObj() { return jl_stderr_obj_(); }
+ jl_value_t *JlCurrentException() { return jl_current_exception_(); }
+ int JlVerMajor() { return jl_ver_major_(); }
+ int JlVerMinor() { return jl_ver_minor_(); }
jl_function_t *JlGetFunction(jl_module_t *m, const std::string &name) {
return reinterpret_cast<jl_function_t *>(JlGetGlobal(m, JlSymbol(name)));
}
@@ -343,6 +381,11 @@ class JuliaAPI {
void (*jl_init__threading_)(void);
jl_value_t *(*jl_apply_array_type_)(jl_value_t *, size_t);
jl_array_t *(*jl_ptr_to_array_)(jl_value_t *, void *, jl_value_t *, int);
+ const char *(*jl_typeof_str_)(jl_value_t *);
+ jl_value_t *(*jl_stderr_obj_)(void);
+ jl_value_t *(*jl_current_exception_)(void);
+ int (*jl_ver_major_)(void);
+ int (*jl_ver_minor_)(void);
};
} // namespace kernel
} // namespace mindspore

@@ -347,14 +347,30 @@ class Custom(ops.PrimitiveWithInfer):
def get_bprop(self):
return self.bprop
+ def _check_julia_func(self):
+ """Check the validity of julia func"""
+ if not isinstance(self.func, str):
+ raise TypeError("{} func should be of type str, but got {}".format(self.func_type, type(self.func)))
+ if self.func.count(':') != 2:
+ raise Exception("func format in julia custom op should be file:module:func.")
+ file, module, func = self.func.split(':')
+ with open(file, 'r') as f:
+ jl = f.read()
+ if 'module ' + module not in jl:
+ raise Exception("module: " + module + " not found!")
+ if 'function ' + func not in jl:
+ raise Exception("function: " + func + " not found!")
def _check_func(self):
"""Check the validity of func_type and type of func"""
if self.func_type not in self.supported_func_type:
raise ValueError("func_type should be one of {}, but got {}"
.format(self.supported_func_type, self.func_type))
if self.func_type == "aot" or self.func_type == "julia":
if self.func_type == "aot":
if not isinstance(self.func, str):
raise TypeError("{} func should be of type str, but got {}".format(self.func_type, type(self.func)))
elif self.func_type == "julia":
self._check_julia_func()
else:
if not callable(self.func):
raise TypeError("{} func should be of type function, but got {}"

@@ -0,0 +1,104 @@
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import os
import platform
import numpy as np
import pytest
from mindspore import context, Tensor
from mindspore.common import dtype as mstype
from mindspore.nn import Cell
import mindspore.ops as ops
from mindspore.ops import DataType, CustomRegOp
class JuliaSingleOutputNet(Cell):
def __init__(self, func, out_shapes, out_types, reg=None):
super(JuliaSingleOutputNet, self).__init__()
self.program = ops.Custom(func, out_shapes, out_types, "julia", reg_info=reg)
def construct(self, x, y):
return self.program(x, y)
def add(x, y):
"""
function add for benchmark
"""
return x + y
def sub(x, y):
"""
function sub for benchmark
"""
return x - y
def julia_single_output(func_name, bench, reg):
shape = (4, 5)
input_x = np.random.normal(0, 1, shape).astype(np.float32)
input_y = np.random.normal(0, 1, shape).astype(np.float32)
func_path = os.path.dirname(os.path.abspath(__file__)) + "/julia_test_files/"
try:
test = JuliaSingleOutputNet(func_path + func_name, (shape,), (mstype.float32,), reg)
output = test(Tensor(input_x), Tensor(input_y))[0]
except Exception as e:
raise e
assert np.allclose(bench(input_x, input_y), output.asnumpy(), 0.001, 0.001)
cpu_info = CustomRegOp() \
.input(0, "x1") \
.input(1, "x2") \
.output(0, "y") \
.dtype_format(DataType.None_None, DataType.None_None, DataType.None_None) \
.target("CPU") \
.get_op_info()
@pytest.mark.level2
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_julia_single_output_cpu_add():
"""
Feature: custom julia operator, multiple inputs, single output, CPU, GRAPH_MODE
Description: a pre-written xxx.jl file is loaded and launched by the custom operator
Expectation: nn result matches numpy result
"""
system = platform.system()
if system != 'Linux':
pass
else:
context.set_context(mode=context.GRAPH_MODE, device_target='CPU')
julia_single_output("add.jl:Add:foo!", add, cpu_info)
@pytest.mark.level2
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_julia_single_output_cpu_sub():
"""
Feature: custom julia operator, multiple inputs, single output, CPU, GRAPH_MODE
Description: a pre-written xxx.jl file is loaded and launched by the custom operator
Expectation: nn result matches numpy result
"""
system = platform.system()
if system != 'Linux':
pass
else:
context.set_context(mode=context.GRAPH_MODE, device_target='CPU')
julia_single_output("sub.jl:Sub:foo!", sub, cpu_info)

@@ -0,0 +1,3 @@
#!/bin/sh
export LD_LIBRARY_PATH=$JULIA_DIR/lib:$LD_LIBRARY_PATH
pytest -m level2 julia_cases.py

tests/st/ops/graph_kernel/custom/test_custom_julia.py Executable file → Normal file

@@ -1,98 +1,16 @@
- # Copyright 2022 Huawei Technologies Co., Ltd
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- # ============================================================================
import os
- import platform
- import numpy as np
import pytest
- from mindspore import context, Tensor
- from mindspore.common import dtype as mstype
- from mindspore.nn import Cell
- import mindspore.ops as ops
- from mindspore.ops import DataType, CustomRegOp
- class JuliaSingleOutputNet(Cell):
- def __init__(self, func, out_shapes, out_types, reg=None):
- super(JuliaSingleOutputNet, self).__init__()
- self.program = ops.Custom(func, out_shapes, out_types, "julia", reg_info=reg)
- def construct(self, x, y):
- return self.program(x, y)
- def add(x, y):
- return x + y
- def sub(x, y):
- return x - y
- def julia_single_output(func_name, bench, reg):
- shape = (4, 5)
- input_x = np.random.normal(0, 1, shape).astype(np.float32)
- input_y = np.random.normal(0, 1, shape).astype(np.float32)
- func_path = os.path.dirname(os.path.abspath(__file__)) + "/julia_test_files/"
- try:
- test = JuliaSingleOutputNet(func_path + func_name, (shape,), (mstype.float32,), reg)
- output = test(Tensor(input_x), Tensor(input_y))[0]
- except Exception as e:
- raise e
- assert np.allclose(bench(input_x, input_y), output.asnumpy(), 0.001, 0.001)
- cpu_info = CustomRegOp() \
- .input(0, "x1") \
- .input(1, "x2") \
- .output(0, "y") \
- .dtype_format(DataType.None_None, DataType.None_None, DataType.None_None) \
- .target("CPU") \
- .get_op_info()
- @pytest.mark.level1
+ @pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
- def test_julia_single_output_cpu_add():
+ def test_julia():
"""
- Feature: custom julia operator, multiple inputs, single output, CPU, GRAPH_MODE
- Description: pre-write xxx.jl, custom operator launches xxx.jl
- Expectation: nn result matches numpy result
+ Feature: test custom op of julia cases
+ Description: run julia_cases
+ Expectation: res == 0
"""
- sys = platform.system()
- if sys == 'Windows':
- pass
- else:
- context.set_context(mode=context.GRAPH_MODE, device_target='CPU')
- julia_single_output("add.jl:Add:foo!", add, cpu_info)
- @pytest.mark.level1
- @pytest.mark.platform_x86_cpu
- @pytest.mark.env_onecard
- def test_julia_single_output_cpu_sub():
- """
- Feature: custom julia operator, multiple inputs, single output, CPU, GRAPH_MODE
- Description: pre-write xxx.jl, custom operator launches xxx.jl
- Expectation: nn result matches numpy result
- """
- sys = platform.system()
- if sys == 'Windows':
- pass
- else:
- context.set_context(mode=context.GRAPH_MODE, device_target='CPU')
- julia_single_output("sub.jl:Sub:foo!", sub, cpu_info)
+ res = os.system('sh julia_run.sh')
+ if res != 0:
+ assert False, 'julia test fail'