Forked from mindspore-Ecosystem/mindspore

commit 4296881a16
parent f65954438a

    syntax

@@ -27,7 +27,7 @@ from .parser import (Parser, create_instance, is_supported_create_instance_type,
 __all__ = ['parse_cb', 'get_parse_method_of_class', 'get_bprop_method_of_class', 'resolve_symbol',
            'get_object_key', 'get_class_instance_type', 'is_class_member', 'get_ast_type', 'get_node_type',
            'get_args_default_values', 'get_ast_namespace_symbol', 'get_operation_namespace_symbol',
-           'get_args', 'get_obj_type', 'get_obj_id', 'create_instance', 'is_supported_create_instance_type',
+           'get_args', 'get_obj_type', 'create_instance', 'is_supported_create_instance_type',
            'get_module_namespace', 'get_class_member_namespace_symbol', 'get_obj_id', 'Parser',
            'get_dataclass_attributes', 'get_dataclass_methods', 'get_dataclass_methods', 'get_scope_name',
            'eval_script', 'create_slice_obj', 'convert_to_ms_tensor', 'get_object_description', 'expand_expr_statement']

@@ -387,6 +387,8 @@ def get_object_description(obj, fname, fline):
         return f"function '{obj.__name__}' at {fname}:{fline}"
+    if isinstance(obj, ast.FunctionDef):
+        return f"function '{obj.name}' at {fname}:{fline}"
     if isinstance(obj, ast.Attribute):
         return f"attribute "
     return str(obj)

@@ -583,7 +585,7 @@ class Parser:
             logger.debug(f"Found `{var}` in global_namespace {self.global_namespace.__str__()}")
             value = self.global_namespace[var]
             if self.is_unsupported_namespace(value):
-                error_info = f"The builtin function '{var}' is not supported in graph mode."
+                error_info = f"The builtin function '{var}' of python is not supported in graph mode."
                 return None, var, error_info
             return self.global_namespace, var

@@ -686,7 +688,7 @@ class Parser:
         elif isinstance(class_type_node, ast.Attribute):
             class_name = getattr(class_type_node, 'attr')
         else:
-            raise ValueError(f"When call 'super', the first arg should be a class type, "
+            raise ValueError(f"The first argument of 'super()' must be a class type, "
                              f"but got {class_type_node.__class__.__name__}.")

         target_father_class = None

@@ -695,7 +697,8 @@ class Parser:
                 target_father_class = class_element
                 break
         if target_father_class is None:
-            raise ValueError("When call 'super', the second arg should be an instance of first arg.")
+            raise ValueError(f"The second argument of 'super()' must be 'self', "
+                             f"but got {subclass_instance}.")
         return super(target_father_class, subclass_instance)

     def get_location(self, node):

@@ -253,8 +253,9 @@ bool InnerScalarGe(T x, U y) {
      int64_t sum = InnerScalar##op_t(IntToLong(GetValue<int>(x)), GetValue<int64_t>(y)); \
      return MakeValue(sum); \
    } \
-    MS_LOG(EXCEPTION) << "Unsupported Value for Scalar" << #op_t << ", x: " << x->ToString() \
-                      << ", y: " << y->ToString(); \
+    MS_LOG(EXCEPTION) << "Unsupported input type for Scalar" << #op_t << ", type of x:" << x->type_name() \
+                      << ", value of x:" << x->ToString() << ", type of y:" << y->type_name() \
+                      << ", value of y:" << y->ToString(); \
  } while (0); \
  }

@@ -302,7 +302,8 @@ ValuePtr ConvertSlice(const py::object &obj) {
      auto value = py::cast<int64_t>(py_attr);
      return MakeValue(value);
    }
-    MS_LOG(EXCEPTION) << "Slice should contain only int64_t or none";
+    MS_LOG(EXCEPTION) << "Attribute '" << attr << "' of " << py::str(obj) << " should be int but got "
+                      << py::str(py_attr);
  };
  ValuePtr start = convert_func("start");
  ValuePtr stop = convert_func("stop");

@@ -166,8 +166,8 @@ void CheckFuncReturn(const FuncGraphPtr &fn, const std::shared_ptr<ParseFunction
      }
      py::str desc =
        python_adapter::CallPyModFn(ast->module(), PYTHON_MOD_GET_OBJECT_DESCRIPTION, ast->function(), ret[0], ret[1]);
-      MS_EXCEPTION(TypeError) << "Missing return statement in " << desc.cast<std::string>() << ". "
-                              << "FuncGraph: " << func_graph->ToString();
+      MS_EXCEPTION(TypeError) << "Function must has 'return' statement, but missing in " << desc.cast<std::string>()
+                              << ". FuncGraph: " << func_graph->ToString();
    }
  }

@@ -315,7 +315,8 @@ FunctionBlockPtr Parser::ParseFunction(const py::object &node, const FunctionBlo
  if (current_fg->get_return() == nullptr) {
    py::list ret = ast_->CallParserObjMethod(PYTHON_PARSE_GET_LOCATION, node);
    py::str desc = python_adapter::CallPyModFn(ast_->module(), PYTHON_MOD_GET_OBJECT_DESCRIPTION, node, ret[0], ret[1]);
-    MS_EXCEPTION(TypeError) << "Missing return statement in " << desc.cast<std::string>() << ".";
+    MS_EXCEPTION(TypeError) << "Function must has 'return' statement, but missing in " << desc.cast<std::string>()
+                            << ".";
  }
  GenerateArgsDefaultValueForFunction(func_block, node);
  return func_block;
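
For illustration only (not part of this commit), a minimal graph-mode Cell that this check rejects; the class name below is hypothetical and mirrors the tests added later in the diff:

    import mindspore.nn as nn
    from mindspore import context

    context.set_context(mode=context.GRAPH_MODE)


    class NoReturnNet(nn.Cell):
        def construct(self, x):
            # construct() has no 'return' statement, so compiling NoReturnNet()(x)
            # is expected to raise the TypeError message added above.
            y = x + 1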

@@ -363,7 +364,8 @@ FunctionBlockPtr Parser::ParseStatement(const FunctionBlockPtr &block, const py:
    return stmt_block;
  } else {
    errcode_ = PARSE_NODE_METHOD_UNSUPPORTED;
-    MS_LOG(EXCEPTION) << "Unsupported syntax '" << node_name << "'.";
+    MS_LOG(EXCEPTION) << "Unsupported statement '" << node_name
+                      << "'.\nMore details please refer to syntax support at https://www.mindspore.cn";
  }
}

@@ -386,7 +388,8 @@ AnfNodePtr Parser::ParseExprNode(const FunctionBlockPtr &block, const py::object
    return expr_node;
  } else {
    errcode_ = PARSE_NODE_METHOD_UNSUPPORTED;
-    MS_LOG(EXCEPTION) << "Unsupported syntax '" << node_name << "'.";
+    MS_LOG(EXCEPTION) << "Unsupported expression '" << node_name
+                      << "'.\nMore details please refer to syntax support at https://www.mindspore.cn";
  }
}

@@ -540,7 +543,8 @@ AnfNodePtr Parser::ParseNum(const FunctionBlockPtr &, const py::object &node) {
  } else {
    // no else actually
    errcode_ = PARSE_NODE_TYPE_UNKNOWN;
-    MS_LOG(EXCEPTION) << "Unsupported Num type : " << (std::string)py::str(obj);
+    MS_EXCEPTION(TypeError) << "Only support 'Number' type of 'int` and 'float', but got type: " << obj.get_type()
+                            << " Value:" << py::str(obj);
  }
}

@@ -613,10 +617,11 @@ AnfNodePtr Parser::ParseSuper(const FunctionBlockPtr &block, const py::list &arg
    father_class = args[0];
    auto arg_type = AstSubType(py::cast<int32_t>(ast_->CallParseModFunction(PYTHON_PARSE_GET_AST_TYPE, args[1])));
    if (arg_type != AST_SUB_TYPE_NAME || py::cast<std::string>(python_adapter::GetPyObjAttr(args[1], "id")) != "self") {
-      MS_EXCEPTION(ArgumentError) << "When call 'super', the second arg should be 'self'.";
+      MS_EXCEPTION(ArgumentError) << "Argument 2 of 'super()' must be 'self', but got '"
+                                  << py::cast<std::string>(python_adapter::GetPyObjAttr(args[1], "id")) << "'.";
    }
  } else {
-    MS_EXCEPTION(ArgumentError) << "When call 'super', the args number should be 0 or 2, but got" << args.size() << ".";
+    MS_EXCEPTION(ArgumentError) << "Arguments number of 'super()' should be 0 or 2, but got " << args.size() << ".";
  }
  py::object target_class_instance = ast_->CallParserObjMethod(PYTHON_PARSE_ANALYZE_SUPER, father_class, ast_->obj());
  py::object namespace_var = ast_->CallParseModFunction(PYTHON_MOD_GET_MEMBER_NAMESPACE_SYMBOL, target_class_instance);
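
For illustration only (not part of this commit), a sketch of the 'super()' misuse these messages describe; the class name is hypothetical and mirrors the tests added later in the diff:

    import mindspore.nn as nn


    class BadSuperNet(nn.Cell):
        def construct(self, x):
            # The second argument of super() is not 'self', so graph compilation
            # is expected to raise ArgumentError with the message above.
            super(BadSuperNet, 2).construct(x)
            return x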

@@ -733,9 +738,9 @@ bool Parser::ParseKeywordsInCall(const FunctionBlockPtr &block, const py::object
    } else {
      auto kw_key_c = kw_key.cast<std::string>();
      keys.push_back(NewValueNode(kw_key_c));
-      auto node = ParseExprNode(block, kw_value);
-      node = HandleInterpret(block, node, kw_value);
-      values.push_back(node);
+      auto ret_node = ParseExprNode(block, kw_value);
+      ret_node = HandleInterpret(block, ret_node, kw_value);
+      values.push_back(ret_node);
    }
  }
  auto keys_tuple = GenerateMakeTuple(block, keys);

@@ -811,8 +816,8 @@ AnfNodePtr Parser::ParseCompare(const FunctionBlockPtr &block, const py::object
  // Which there is two ops , but we only support one now
  py::list ops = python_adapter::GetPyObjAttr(node, "ops");
  if (ops.size() != MAX_COMPARISON_OPS_SUPPORTED) {
-    MS_EXCEPTION(NotSupportError) << "MindSpore only support comparison with operators with one now, ops size ="
-                                  << ops.size();
+    MS_EXCEPTION(NotSupportError) << "Only support comparison with 1 operator, but got " << ops.size() << ", which is "
+                                  << py::str(ops);
  }

  py::object left = python_adapter::GetPyObjAttr(node, "left");
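
For illustration only (not part of this commit), the kind of chained comparison this branch reports; the class name is hypothetical:

    import mindspore.nn as nn


    class ChainedCompareNet(nn.Cell):
        def construct(self, x, y):
            # Two comparison operators in one expression; only one is supported,
            # so compiling this is expected to raise NotSupportError.
            return x > y > 10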

@@ -1087,9 +1092,18 @@ FunctionBlockPtr Parser::ParseAugAssign(const FunctionBlockPtr &block, const py:
    target_node = ParseSubscript(block, target_object);
  } else if (ast_->IsClassMember(target_object)) {
    target_node = ParseAttribute(block, target_object);
+  } else if (ast_type == AST_SUB_TYPE_ATTRIBUTE) {
+    TraceGuard(GetLocation(target_object));
+    MS_EXCEPTION(TypeError) << "Only support augassign to attribute of self, but got attribute of "
+                            << py::str(target_object.attr("value").attr("id")) << ".\n"
+                            << "More details please refer to syntax support at https://www.mindspore.cn";
  } else {
-    MS_LOG(EXCEPTION) << "Not supported augassign";
+    TraceGuard(GetLocation(target_object));
+    MS_EXCEPTION(TypeError) << "Only supported augassign to attribute of self, variable and index value, but got "
+                            << target_object.get_type()
+                            << ".\nMore details please refer to syntax support at https://www.mindspore.cn";
  }

  if (target_node == nullptr) {
    MS_LOG(EXCEPTION) << "Can not get target node ";
  }
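
For illustration only (not part of this commit), an augmented assignment that falls into the new attribute branch; the attribute name is hypothetical and mirrors the tests added later in the diff:

    import mindspore.nn as nn


    class BadAugAssignNet(nn.Cell):
        def construct(self, x, y):
            # Augmented assignment to an attribute that is not 'self.xxx';
            # compiling this is expected to raise the TypeError added above.
            x.id_ += y
            return x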

@@ -1648,7 +1662,7 @@ AnfNodePtr Parser::ParseListComp(const FunctionBlockPtr &block, const py::object
  // Handle generators attribute.
  py::list generators_node = python_adapter::GetPyObjAttr(node, "generators");
  if (generators_node.size() != 1) {
-    MS_EXCEPTION(TypeError) << "The `generators` supports one `comprehension` in ListComp/GeneratorExp, but got "
+    MS_EXCEPTION(TypeError) << "The 'generators' supports 1 'comprehension' in ListComp/GeneratorExp, but got "
                            << generators_node.size() << " comprehensions.";
  }
  py::object generator_node = generators_node[0];

@@ -1717,14 +1731,15 @@ void Parser::HandleAssignClassMember(const FunctionBlockPtr &block, const py::ob

  // Now only support the self.xxx = yyy, where self.xxx must be a defined Parameter type
  if (!py::hasattr(ast()->obj(), common::SafeCStr(attr_name))) {
-    MS_EXCEPTION(TypeError) << "'" << var_name << "' should be defined in the class '__init__' function. \n\n"
-                            << trace::GetDebugInfo(target_node->debug_info());
+    MS_EXCEPTION(TypeError)
+      << "'" << var_name << "' should be initialized as a 'Parameter' in the '__init__' function before assigning.\n\n"
+      << trace::GetDebugInfo(target_node->debug_info());
  }
  auto obj = ast()->obj().attr(common::SafeCStr(attr_name));
  auto obj_type = obj.attr("__class__").attr("__name__");
  if (!py::hasattr(obj, "__parameter__")) {
    MS_EXCEPTION(TypeError) << "'" << var_name
-                            << "' should be defined with a Parameter type in the class '__init__' function, but got '"
+                            << "' should be initialized as a 'Parameter' type in the '__init__' function, but got '"
                            << py::str(obj).cast<std::string>() << "' with type '"
                            << py::str(obj_type).cast<std::string>() << ".\n\n"
                            << trace::GetDebugInfo(target_node->debug_info());
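
For illustration only (not part of this commit), an assignment to a class member that was not initialized as a Parameter, which this check now reports; names are hypothetical and mirror the tests added later in the diff:

    import mindspore.nn as nn
    from mindspore import context

    context.set_context(mode=context.GRAPH_MODE)


    class BadMemberAssignNet(nn.Cell):
        def __init__(self):
            super().__init__()
            self.m = 1  # plain Python int, not a Parameter

        def construct(self, x):
            # Assigning to self.m inside construct() is expected to raise the
            # TypeError above, because self.m is not a 'Parameter'.
            self.m = x
            return x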

@@ -1750,14 +1765,16 @@ void Parser::HandleAssignSubscript(const FunctionBlockPtr &block, const py::obje
    auto attr_name = value_obj.attr("attr").cast<std::string>();
    var_name = "self." + attr_name;
    if (!py::hasattr(ast()->obj(), common::SafeCStr(attr_name))) {
-      MS_EXCEPTION(TypeError) << "'" << var_name << "' was not defined in the class '__init__' function.\n\n"
-                              << trace::GetDebugInfo(value_node->debug_info());
+      MS_EXCEPTION(TypeError)
+        << "'" << var_name
+        << "' should be initialized as a 'Parameter' in the '__init__' function before assigning.\n\n"
+        << trace::GetDebugInfo(value_node->debug_info());
    }
    auto obj = ast()->obj().attr(common::SafeCStr(attr_name));
    auto obj_type = obj.attr("__class__").attr("__name__");
    if (!py::hasattr(obj, "__parameter__")) {
      MS_EXCEPTION(TypeError) << "'" << var_name
-                              << "' should be defined with a Parameter in the class '__init__' function, but got '"
+                              << "' should be initialized as a 'Parameter' in the '__init__' function, but got '"
                              << py::str(obj).cast<std::string>() << "' with type '"
                              << py::str(obj_type).cast<std::string>() << "'.\n\n"
                              << trace::GetDebugInfo(value_node->debug_info());

@@ -1792,11 +1809,15 @@ void Parser::WriteAssignVars(const FunctionBlockPtr &block, const py::object &ta
  } else if (ast_->IsClassMember(target_object)) {
    HandleAssignClassMember(block, target_object, value_node);
  } else if (ast_type == AST_SUB_TYPE_ATTRIBUTE) {
-    MS_LOG(EXCEPTION) << "The subnet attributes cannot be changed in the network. \n\n"
-                      << trace::GetDebugInfo(value_node->debug_info());
+    TraceGuard(GetLocation(target_object));
+    MS_EXCEPTION(TypeError) << "Only support assign to attribute of self, but got attribute of "
+                            << py::str(target_object.attr("value").attr("id")) << ".\n"
+                            << "More details please refer to syntax support at https://www.mindspore.cn";
  } else {
-    MS_LOG(EXCEPTION) << "Not support this assign type: " << ast_type << "\n\n"
-                      << trace::GetDebugInfo(value_node->debug_info());
+    TraceGuard(GetLocation(target_object));
+    MS_EXCEPTION(TypeError) << "Only supported augassign to attribute of self, variable and index value, but got "
+                            << target_object.get_type()
+                            << ".\nMore details please refer to syntax support at https://www.mindspore.cn";
  }
}

@@ -232,8 +232,8 @@ void CheckArgsValid(const py::tuple &args) {
      MS_EXCEPTION(TypeError)
        << "The inputs types of the outermost network support bool, int, float, tensor, "
           "mstype.Number(mstype.bool, mstype.int, mstype.float, mstype.uint), "
-          "and tuple or list containing only these types, and dict whose values are these types, but got "
-        << i << "th arg is " << py::str(args[i]);
+          "and tuple or list containing only these types, and dict whose values are these types, but the "
+        << i << "th arg type is " << args[i].get_type() << ", value is '" << py::str(args[i]) << "'.";
    }
  }
}

@@ -716,8 +716,9 @@ bool GraphExecutorPy::CompileInner(const py::object &source_obj, const py::tuple
  CheckArgsValid(args);

  auto phase = py::cast<std::string>(phase_obj);
-  MS_LOG(INFO) << "Start compiling, phase: " << phase << ", source: {" << py::str(source_obj) << "}";
-  MS_LOG(DEBUG) << "args: " << py::str(const_cast<py::tuple &>(args));
+  MS_LOG(INFO) << "Start compiling, phase: " << phase << ".";
+  MS_LOG(DEBUG) << "Compiling source: {" << py::str(source_obj)
+                << "}\n\n Args: " << py::str(const_cast<py::tuple &>(args));

#ifdef ENABLE_GE
  GetGeBackendPolicy();

@@ -0,0 +1,46 @@
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test syntax for logic expression """

import mindspore.nn as nn
import mindspore
from mindspore import context
from mindspore.common.tensor import Tensor

context.set_context(mode=context.GRAPH_MODE, save_graphs=True, save_graphs_path="graph_paths")


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.m = 1

    def construct(self, x, y):
        x += 1
        #x += self.x
        print(x)
        #x = y
        x = "aaa"
        #x = 5.0
        return x


def test_assign():
    net = Net()
    y = Tensor((1), mindspore.int32)
    x = 1
    ret = net(x, y)
    print(ret)
    print(x)

@@ -0,0 +1,45 @@
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test syntax for logic expression """

import mindspore.nn as nn
import mindspore
from mindspore import context
from mindspore.common.parameter import Parameter
from mindspore.common.tensor import Tensor

context.set_context(mode=context.GRAPH_MODE)


class Net(nn.Cell):
    def __init__(self):
        super().__init__()
        self.weight = Parameter(Tensor(3, mindspore.float32), name="w")
        self.m = 2

    def construct(self, x, y):
        self.weight = x
        self.m = 3
        #self.l = 1
        #y.weight = x
        print(self.weight)
        return x

def test_attr_ref():
    x = Tensor(4, mindspore.float32)
    net_y = Net()
    net = Net()
    ret = net(x, net_y)
    print(ret)

@@ -0,0 +1,42 @@
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test syntax for logic expression """

import numpy as np

import mindspore.nn as nn
import mindspore
from mindspore import context
from mindspore.common.tensor import Tensor
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE, save_graphs=True, save_graphs_path="graph_paths")


class Net(nn.Cell):
    def __init__(self,):
        super().__init__()
        self.matmul = P.MatMul()

    def construct(self, x, y):
        out = self.matmul(x, y)
        return out

def test_call():
    x = Tensor(np.ones(shape=[1, 3]), mindspore.float32)
    y = Tensor(np.ones(shape=[3, 4]), mindspore.float32)
    net = Net()
    ret = net(x, y)
    print(ret)
@ -0,0 +1,51 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
""" test syntax for logic expression """
|
||||
|
||||
import mindspore.nn as nn
|
||||
import mindspore
|
||||
from mindspore import context
|
||||
from mindspore.common.tensor import Tensor
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE, save_graphs=True, save_graphs_path="graph_paths")
|
||||
|
||||
|
||||
class Net(nn.Cell):
|
||||
def __init__(self):
|
||||
super(Net, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
return x > y
|
||||
|
||||
|
||||
def test_compare_bool_vs_bool():
|
||||
net = Net()
|
||||
ret = net(True, True)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_compare_bool_vs_int():
|
||||
net = Net()
|
||||
ret = net(True, 1)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_compare_tensor_int_vs_tensor_float():
|
||||
x = Tensor(1, mindspore.int32)
|
||||
y = Tensor(1.5, mindspore.float64)
|
||||
net = Net()
|
||||
ret = net(x, y)
|
||||
print(ret)
|
|
@ -0,0 +1,61 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
""" test syntax for logic expression """
|
||||
|
||||
import mindspore.nn as nn
|
||||
from mindspore import context
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE)
|
||||
|
||||
|
||||
class IdentityIs(nn.Cell):
|
||||
def __init__(self, x, y):
|
||||
super(IdentityIs, self).__init__()
|
||||
self.x = x
|
||||
self.y = y
|
||||
|
||||
def construct(self):
|
||||
in_v = self.x is self.y
|
||||
return in_v
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_is_int():
|
||||
net = IdentityIs(1, 2)
|
||||
ret = net()
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_is_none():
|
||||
net = IdentityIs(1, None)
|
||||
ret = net()
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_is_true():
|
||||
net = IdentityIs(1, True)
|
||||
ret = net()
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_bool_is_none():
|
||||
net = IdentityIs(True, None)
|
||||
ret = net()
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_bool_is_false():
|
||||
net = IdentityIs(True, False)
|
||||
ret = net()
|
||||
print(ret)
|
|
@ -0,0 +1,61 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
""" test syntax for logic expression """
|
||||
|
||||
import mindspore.nn as nn
|
||||
from mindspore import context
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE)
|
||||
|
||||
|
||||
class IdentityIsNot(nn.Cell):
|
||||
def __init__(self, x, y):
|
||||
super(IdentityIsNot, self).__init__()
|
||||
self.x = x
|
||||
self.y = y
|
||||
|
||||
def construct(self):
|
||||
in_v = self.x is not self.y
|
||||
return in_v
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_is_not_int():
|
||||
net = IdentityIsNot(1, 2)
|
||||
ret = net()
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_is_not_none():
|
||||
net = IdentityIsNot(1, None)
|
||||
ret = net()
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_is_not_true():
|
||||
net = IdentityIsNot(1, True)
|
||||
ret = net()
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_bool_is_not_none():
|
||||
net = IdentityIsNot(True, None)
|
||||
ret = net()
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_bool_is_not_false():
|
||||
net = IdentityIsNot(True, False)
|
||||
ret = net()
|
||||
print(ret)
|
|

@@ -0,0 +1,34 @@
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test syntax for logic expression """

import mindspore.nn as nn
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=True, save_graphs_path="graph_paths")


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.m = 1

    def construct(self, x):
        return ~x

def test_invert():
    net = Net()
    ret = net(1)
    print(ret)
@ -0,0 +1,152 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
""" test syntax for logic expression """
|
||||
|
||||
import numpy as np
|
||||
|
||||
import mindspore.nn as nn
|
||||
from mindspore import context
|
||||
from mindspore.common.tensor import Tensor
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE)
|
||||
|
||||
|
||||
class LogicAnd(nn.Cell):
|
||||
def __init__(self):
|
||||
super(LogicAnd, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
and_v = x and y
|
||||
return and_v
|
||||
|
||||
|
||||
class LogicAndSpec(nn.Cell):
|
||||
def __init__(self, x, y):
|
||||
super(LogicAndSpec, self).__init__()
|
||||
self.x = x
|
||||
self.y = y
|
||||
|
||||
def construct(self, x, y):
|
||||
and_v = self.x and self.y
|
||||
return and_v
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_int_and_int():
|
||||
net = LogicAnd()
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_float_and_float():
|
||||
net = LogicAnd()
|
||||
ret = net(1.89, 1.99)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_float_and_int():
|
||||
net = LogicAnd()
|
||||
ret = net(1.89, 1)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_tensor_1_int_and_tensor_1_int():
|
||||
net = LogicAnd()
|
||||
x = Tensor(np.ones([1], np.int32))
|
||||
y = Tensor(np.zeros([1], np.int32))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_tensor_1_float_and_tensor_1_int():
|
||||
net = LogicAnd()
|
||||
x = Tensor(np.ones([1], np.float))
|
||||
y = Tensor(np.zeros([1], np.int32))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_tensor_1_int_and_int():
|
||||
net = LogicAnd()
|
||||
x = Tensor(np.ones([1], np.int32))
|
||||
y = 2
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_tensor_2X2_int_and_tensor_2X2_int():
|
||||
net = LogicAnd()
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
y = Tensor(np.zeros([2, 2], np.int32))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_int_and_str():
|
||||
net = LogicAnd()
|
||||
ret = net(1, "cba")
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_int_and_str_2():
|
||||
net = LogicAndSpec(1, "cba")
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_str_and_str():
|
||||
net = LogicAndSpec("abc", "cba")
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_int_and_list_int():
|
||||
net = LogicAnd()
|
||||
ret = net([1, 2, 3], [3, 2, 1])
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_int_and_int():
|
||||
net = LogicAnd()
|
||||
ret = net([1, 2, 3], 1)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_int_and_str():
|
||||
net = LogicAndSpec([1, 2, 3], "aaa")
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_int_and_list_str():
|
||||
net = LogicAndSpec([1, 2, 3], ["1", "2", "3"])
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_int_and_list_str_var():
|
||||
left = [1, 2, 3]
|
||||
right = ["1", "2", "3"]
|
||||
net = LogicAndSpec(left, right)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_str_and_tensor_int():
|
||||
left = ["1", "2", "3"]
|
||||
right = Tensor(np.ones([2, 2], np.int32))
|
||||
net = LogicAndSpec(left, right)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
|
@ -0,0 +1,135 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
""" test syntax for logic expression """
|
||||
|
||||
import numpy as np
|
||||
|
||||
import mindspore.nn as nn
|
||||
from mindspore import context
|
||||
from mindspore.common.tensor import Tensor
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE)
|
||||
|
||||
|
||||
class LogicNot(nn.Cell):
|
||||
def __init__(self):
|
||||
super(LogicNot, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x):
|
||||
not_v = not x
|
||||
return not_v
|
||||
|
||||
|
||||
class LogicNotSpec(nn.Cell):
|
||||
def __init__(self, x):
|
||||
super(LogicNotSpec, self).__init__()
|
||||
self.x = x
|
||||
|
||||
def construct(self, x):
|
||||
not_v = not self.x
|
||||
return not_v
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_int():
|
||||
net = LogicNot()
|
||||
ret = net(1)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_float():
|
||||
net = LogicNot()
|
||||
ret = net(1.89)
|
||||
print(ret)
|
||||
|
||||
def test_ms_syntax_operator_logic_tensor_1_int():
|
||||
net = LogicNot()
|
||||
x = Tensor(np.ones([1], np.int32))
|
||||
ret = net(x)
|
||||
print(ret)
|
||||
|
||||
def test_ms_syntax_operator_logic_not_tensor_1_float():
|
||||
net = LogicNot()
|
||||
x = Tensor(np.ones([1], np.float))
|
||||
ret = net(x)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_tensor_2X2_int():
|
||||
net = LogicNot()
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
ret = net(x)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_tensor_2X2_float():
|
||||
net = LogicNot()
|
||||
x = Tensor(np.ones([2, 2], np.float))
|
||||
ret = net(x)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_str():
|
||||
net = LogicNotSpec("cba")
|
||||
ret = net(1)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_list_int():
|
||||
net = LogicNot()
|
||||
ret = net([1, 2, 3])
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_list_float():
|
||||
net = LogicNot()
|
||||
ret = net([1.0, 2.0, 3.0])
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_list_str():
|
||||
net = LogicNotSpec(["1", "2", "3"])
|
||||
ret = net(1)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_list_combine():
|
||||
net = LogicNotSpec([1, "2", 3])
|
||||
ret = net(1)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_tuple_int():
|
||||
net = LogicNot()
|
||||
ret = net((1, 2, 3))
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_tuple_float():
|
||||
net = LogicNot()
|
||||
ret = net((1.0, 2.0, 3.0))
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_tuple_str():
|
||||
net = LogicNotSpec(("1", "2", "3"))
|
||||
ret = net(1)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_not_tuple_combine():
|
||||
net = LogicNotSpec((1, "2", 3))
|
||||
ret = net(1)
|
||||
print(ret)
|
|
@ -0,0 +1,136 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
""" test syntax for logic expression """
|
||||
|
||||
import numpy as np
|
||||
|
||||
import mindspore.nn as nn
|
||||
from mindspore import context
|
||||
from mindspore.common.tensor import Tensor
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE)
|
||||
|
||||
|
||||
class LogicOr(nn.Cell):
|
||||
def __init__(self):
|
||||
super(LogicOr, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
or_v = x or y
|
||||
return or_v
|
||||
|
||||
|
||||
class LogicOrSpec(nn.Cell):
|
||||
def __init__(self, x, y):
|
||||
super(LogicOrSpec, self).__init__()
|
||||
self.x = x
|
||||
self.y = y
|
||||
|
||||
def construct(self, x, y):
|
||||
or_v = self.x or self.y
|
||||
return or_v
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_int_or_int():
|
||||
net = LogicOr()
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_float_or_float():
|
||||
net = LogicOr()
|
||||
ret = net(1.89, 1.99)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_float_or_int():
|
||||
net = LogicOr()
|
||||
ret = net(1.89, 1)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_tensor_1_int_or_tensor_1_int():
|
||||
net = LogicOr()
|
||||
x = Tensor(np.ones([1], np.int32))
|
||||
y = Tensor(np.zeros([1], np.int32))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_tensor_1_float_or_tensor_1_int():
|
||||
net = LogicOr()
|
||||
x = Tensor(np.ones([1], np.float))
|
||||
y = Tensor(np.zeros([1], np.int32))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_tensor_2X2_int_or_tensor_2X2_int():
|
||||
net = LogicOr()
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
y = Tensor(np.zeros([2, 2], np.int32))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_int_or_str():
|
||||
net = LogicOr()
|
||||
ret = net(1, "cba")
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_int_or_str_2():
|
||||
net = LogicOrSpec(1, "cba")
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_str_or_str():
|
||||
net = LogicOrSpec("abc", "cba")
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_int_or_list_int():
|
||||
net = LogicOr()
|
||||
ret = net([1, 2, 3], [3, 2, 1])
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_int_or_int():
|
||||
net = LogicOr()
|
||||
ret = net([1, 2, 3], 1)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_int_or_str():
|
||||
net = LogicOrSpec([1, 2, 3], "aaa")
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_int_or_list_str():
|
||||
net = LogicOrSpec([1, 2, 3], ["1", "2", "3"])
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_logic_list_int_or_list_str_var():
|
||||
left = [1, 2, 3]
|
||||
right = ["1", "2", "3"]
|
||||
net = LogicOrSpec(left, right)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
|
@ -0,0 +1,140 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
""" test syntax for logic expression """
|
||||
|
||||
import numpy as np
|
||||
|
||||
import mindspore.nn as nn
|
||||
from mindspore import context
|
||||
from mindspore.common.tensor import Tensor
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE)
|
||||
|
||||
|
||||
class MemberIn(nn.Cell):
|
||||
def __init__(self):
|
||||
super(MemberIn, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
in_v = x in y
|
||||
return in_v
|
||||
|
||||
|
||||
class MemberInSpec(nn.Cell):
|
||||
def __init__(self, x, y):
|
||||
super(MemberInSpec, self).__init__()
|
||||
self.x = x
|
||||
self.y = y
|
||||
|
||||
|
||||
def construct(self, x, y):
|
||||
in_v = self.x in self.y
|
||||
return in_v
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_in_int():
|
||||
net = MemberIn()
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_in_list_int():
|
||||
net = MemberIn()
|
||||
ret = net(1, [1, 2])
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_in_list_str():
|
||||
net = MemberInSpec(1, ["1", "2"])
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_in_list_combine():
|
||||
net = MemberInSpec(1, ["1", 2])
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_in_tuple_int():
|
||||
net = MemberIn()
|
||||
ret = net(1, (1, 2))
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_in_tuple_str():
|
||||
net = MemberInSpec(1, ("1", 2))
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_in_dict_str():
|
||||
dict_y = {"1": 2, "2": 3}
|
||||
net = MemberInSpec(1, dict_y)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_str_in_dict_str():
|
||||
dict_y = {"1": 2, "2": 3}
|
||||
net = MemberInSpec("1", dict_y)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_str_in_dict_combine():
|
||||
dict_y = {"1": 2, 2: 3}
|
||||
net = MemberInSpec("1", dict_y)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_in_dict_combine():
|
||||
dict_y = {"1": 2, 2: 3}
|
||||
net = MemberInSpec(1, dict_y)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_tensor_in_list_tensor():
|
||||
net = MemberIn()
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
y = Tensor(np.zeros([2, 2], np.int32))
|
||||
ret = net(x, [x, y])
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_tensor_in_list_combine():
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
y = Tensor(np.zeros([2, 2], np.int32))
|
||||
net = MemberInSpec(x, [y, "a"])
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_tensor_in_tuple_tensor():
|
||||
net = MemberIn()
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
y = Tensor(np.zeros([2, 2], np.int32))
|
||||
ret = net(x, (x, y))
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_tensor_in_tuple_combine():
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
net = MemberInSpec(x, (x, "a"))
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
|
@ -0,0 +1,139 @@
|
|||
# Copyright 2021 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
""" test syntax for logic expression """
|
||||
|
||||
import numpy as np
|
||||
|
||||
import mindspore.nn as nn
|
||||
from mindspore import context
|
||||
from mindspore.common.tensor import Tensor
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE)
|
||||
|
||||
|
||||
class MemberNotIn(nn.Cell):
|
||||
def __init__(self):
|
||||
super(MemberNotIn, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
not_in_v = x not in y
|
||||
return not_in_v
|
||||
|
||||
|
||||
class MemberNotInSpec(nn.Cell):
|
||||
def __init__(self, x, y):
|
||||
super(MemberNotInSpec, self).__init__()
|
||||
self.x = x
|
||||
self.y = y
|
||||
|
||||
def construct(self, x, y):
|
||||
not_in_v = self.x not in self.y
|
||||
return not_in_v
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_not_in_int():
|
||||
net = MemberNotIn()
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_not_in_list_int():
|
||||
net = MemberNotIn()
|
||||
ret = net(1, [1, 2])
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_not_in_list_str():
|
||||
net = MemberNotInSpec(1, ["1", "2"])
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_not_in_list_combine():
|
||||
net = MemberNotInSpec(1, ["1", 2])
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_not_in_tuple_int():
|
||||
net = MemberNotIn()
|
||||
ret = net(1, (1, 2))
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_not_in_tuple_str():
|
||||
net = MemberNotInSpec(1, ("1", 2))
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_not_in_dict_int():
|
||||
dict_y = {1: 2, 2: 3}
|
||||
net = MemberNotInSpec(1, dict_y)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_int_not_in_dict_str():
|
||||
dict_y = {"1": 2, "2": 3}
|
||||
net = MemberNotInSpec(1, dict_y)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_str_not_in_dict_str():
|
||||
dict_y = {"1": 2, "2": 3}
|
||||
net = MemberNotInSpec("1", dict_y)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_str_not_in_dict_combine():
|
||||
dict_y = {"1": 2, 2: 3}
|
||||
net = MemberNotInSpec("1", dict_y)
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_tensor_not_in_list_tensor():
|
||||
net = MemberNotIn()
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
y = Tensor(np.zeros([2, 2], np.int32))
|
||||
ret = net(x, [x, y])
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_tensor_not_in_list_combine():
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
y = Tensor(np.zeros([2, 2], np.int32))
|
||||
net = MemberNotInSpec(x, [y, "a"])
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_tensor_not_in_tuple_tensor():
|
||||
net = MemberNotIn()
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
y = Tensor(np.zeros([2, 2], np.int32))
|
||||
ret = net(x, (x, y))
|
||||
print(ret)
|
||||
|
||||
|
||||
def test_ms_syntax_operator_tensor_not_in_tuple_combine():
|
||||
x = Tensor(np.ones([2, 2], np.int32))
|
||||
net = MemberNotInSpec(x, (x, "a"))
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
|
@ -0,0 +1,373 @@
|
|||
import numpy as np
|
||||
|
||||
import mindspore.nn as nn
|
||||
from mindspore import context
|
||||
from mindspore.common.parameter import Parameter
|
||||
from mindspore.common.tensor import Tensor
|
||||
import mindspore.ops as ops
|
||||
import mindspore
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE, save_graphs=True, save_graphs_path="graph_path")
|
||||
|
||||
|
||||
class TestNoReturn(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestNoReturn, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
and_v = x * y
|
||||
and_v += 1
|
||||
# return and_v
|
||||
|
||||
|
||||
def test_no_return():
|
||||
net = TestNoReturn()
|
||||
x = Tensor(np.ones([2, 2], np.float))
|
||||
y = Tensor(np.zeros([2, 2], np.float))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestSuper(nn.Cell):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
super(TestSuper, 2, 3).aa()
|
||||
and_v = x * y
|
||||
return and_v
|
||||
|
||||
|
||||
def test_super():
|
||||
net = TestSuper()
|
||||
x = Tensor(np.ones([2, 2], np.float))
|
||||
y = Tensor(np.zeros([2, 2], np.float))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestCompare(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestCompare, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
return x > y > 10
|
||||
|
||||
|
||||
def test_compare():
|
||||
net = TestCompare()
|
||||
x = Tensor(np.ones([2, 2], np.float))
|
||||
y = Tensor(np.zeros([2, 2], np.float))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestUndefMemberChange(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestUndefMemberChange, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
self.t = x
|
||||
return x > y
|
||||
|
||||
|
||||
def test_undef_member_changer():
|
||||
net = TestUndefMemberChange()
|
||||
x = Tensor(np.ones([2, 2], np.float))
|
||||
y = Tensor(np.zeros([2, 2], np.float))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestMemberChange(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestMemberChange, self).__init__()
|
||||
self.t = Tensor(np.zeros([2, 2], np.float))
|
||||
|
||||
def construct(self, x, y):
|
||||
self.t = x
|
||||
return x > y
|
||||
|
||||
|
||||
def test_member_changer():
|
||||
net = TestMemberChange()
|
||||
x = Tensor(np.ones([2, 2], np.float))
|
||||
y = Tensor(np.zeros([2, 2], np.float))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestUnsupportSTMT(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestUnsupportSTMT, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
try:
|
||||
val = x + y
|
||||
finally:
|
||||
val = x
|
||||
return val
|
||||
|
||||
|
||||
def test_UnsupportSTMT():
|
||||
net = TestUnsupportSTMT()
|
||||
x = Tensor(np.ones([2, 2], np.float))
|
||||
y = Tensor(np.zeros([2, 2], np.float))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestUnsupportNum(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestUnsupportNum, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
a = x + 3.14j
|
||||
return a
|
||||
|
||||
|
||||
def test_UnsupportNum():
|
||||
net = TestUnsupportNum()
|
||||
x = Tensor(np.ones([2, 2], np.float))
|
||||
y = Tensor(np.zeros([2, 2], np.float))
|
||||
ret = net(x, y)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestAssignAdd(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestAssignAdd, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
x.id_ += y
|
||||
# x[1] += y
|
||||
return x
|
||||
|
||||
|
||||
def test_AssignAdd():
|
||||
net = TestAssignAdd()
|
||||
ret = net([3, 1], 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestParseListComp(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestParseListComp, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
ret = [m + y for l in x for m in l]
|
||||
return ret
|
||||
|
||||
|
||||
def test_ParseListComp():
|
||||
net = TestParseListComp()
|
||||
|
||||
ret = net([[1, 2], [3, 4]], 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestAssign(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestAssign, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
x.id_ = y
|
||||
return x
|
||||
|
||||
|
||||
def test_Assign():
|
||||
net = TestAssign()
|
||||
ret = net([3, 1], 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestAssignList(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestAssignList, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, y):
|
||||
[m, n] = [x, y]
|
||||
return m, n
|
||||
|
||||
|
||||
def test_AssignList():
|
||||
net = TestAssignList()
|
||||
ret = net([3, 1], 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestParaDef(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestParaDef, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x=1, y=1):
|
||||
ret = x + y
|
||||
return ret
|
||||
|
||||
|
||||
def test_para_def():
|
||||
net = TestParaDef()
|
||||
ret = net(1, 2)
|
||||
print(ret)
|
||||
|
||||
|
||||
class TestParameterNameNone(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestParameterNameNone, self).__init__()
|
||||
self.matmul = ops.MatMul()
|
||||
# self.weight = Parameter(Tensor(np.ones((1, 2)), mindspore.float32), name="w", requires_grad=True)
|
||||
self.weight = Parameter(Tensor(np.ones((1, 2)), mindspore.float32), name=None, requires_grad=True)
|
||||
|
||||
def construct(self, x):
|
||||
out = self.matmul(self.weight, x)
|
||||
return out
|
||||
|
||||
|
||||
def test_parameter_name_none():
|
||||
net = TestParameterNameNone()
|
||||
x = Tensor(np.ones((2, 1)), mindspore.float32)
|
||||
print(net(x))
|
||||
|
||||
|
||||
class TestBranchReturn(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestBranchReturn, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x):
|
||||
if x > 0:
|
||||
return x + 1
|
||||
|
||||
return x
|
||||
|
||||
|
||||
def test_branch_return():
|
||||
net = TestBranchReturn()
|
||||
print(net(1))
|
||||
|
||||
|
||||
class TestSliceNotInt(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestSliceNotInt, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x):
|
||||
s = "ABCDEFGHIJKL"
|
||||
sl = slice(x, 4.5)
|
||||
return s[sl]
|
||||
|
||||
|
||||
def test_slice_not_int():
|
||||
net = TestSliceNotInt()
|
||||
print(net(1))
|
||||
|
||||
|
||||
class TestSliceNotIntDefInInit(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestSliceNotIntDefInInit, self).__init__()
|
||||
self.sl = slice(1, 4.5)
|
||||
|
||||
def construct(self, x):
|
||||
s = "ABCDEFGHIJKL"
|
||||
return s[self.sl]
|
||||
|
||||
|
||||
def test_slice_not_int_def_in_init():
|
||||
net = TestSliceNotIntDefInInit()
|
||||
print(net(1))
|
||||
|
||||
|
||||
class MatMulCell(nn.Cell):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x):
|
||||
return x
|
||||
|
||||
|
||||
class TestCellPipelineStage(nn.Cell):
|
||||
def __init__(self, strategy1, strategy2, param=None):
|
||||
super().__init__()
|
||||
self.block = nn.CellList()
|
||||
cell = MatMulCell()
|
||||
cell.pipeline_stage = -1
|
||||
self.block.append(cell)
|
||||
cell = MatMulCell()
|
||||
cell.pipeline_stage = -1
|
||||
self.block.append(cell)
|
||||
|
||||
def construct(self, x):
|
||||
for i in range(2):
|
||||
x = self.block[i](x)
|
||||
return x
|
||||
|
||||
|
||||
def test_cell_pipeline_state():
|
||||
strategy1 = Tensor((4, 1), mindspore.int64)
|
||||
strategy2 = Tensor((2, 1), mindspore.int64)
|
||||
net = TestCellPipelineStage(strategy1, strategy2)
|
||||
print(net(1))
|
||||
|
||||
|
||||
class TestArgsKwArgs(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestArgsKwArgs, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, *args, **kwargs):
|
||||
x = 0
|
||||
for v in args:
|
||||
x += v
|
||||
|
||||
# for k, v in kwargs.items():
|
||||
# x += v
|
||||
return x
|
||||
|
||||
|
||||
def test_args_kwargs():
|
||||
net = TestArgsKwArgs()
|
||||
print(net(1, 2, 3, 4, k1=5, k2=6))
|
||||
|
||||
|
||||
class TestArgs(nn.Cell):
|
||||
def __init__(self):
|
||||
super(TestArgs, self).__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x, *args):
|
||||
for v in args:
|
||||
x += v
|
||||
|
||||
return x
|
||||
|
||||
|
||||
def test_args():
|
||||
net = TestArgs()
|
||||
print(net(1, 2, 3, 4))
|
||||
|
||||
|
||||
class TestNoDef(nn.Cell):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.m = 1
|
||||
|
||||
def construct(self, x):
|
||||
x += self.y
|
||||
return x
|
||||
|
||||
|
||||
def test_no_def():
|
||||
net = TestNoDef()
|
||||
print(net(1))
|
|
@@ -0,0 +1,82 @@
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test syntax for logic expression """

import numpy as np

import mindspore.nn as nn
import mindspore
from mindspore import context
from mindspore.common.tensor import Tensor
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE, save_graphs=True, save_graphs_path="graph_paths")


class ArgumentNum(nn.Cell):
    def __init__(self):
        super().__init__()
        self.matmul = P.MatMul()

    def construct(self, x, y):
        super(ArgumentNum, 2, 3).aa()
        out = self.matmul(x, y)
        return out


def test_super_argument_num():
    x = Tensor(np.ones(shape=[1, 3]), mindspore.float32)
    y = Tensor(np.ones(shape=[3, 4]), mindspore.float32)
    net = ArgumentNum()
    ret = net(x, y)
    print(ret)


class ArgumentNotSelf(nn.Cell):
    def __init__(self):
        super().__init__()
        self.matmul = P.MatMul()

    def construct(self, x, y):
        super(ArgumentNotSelf, 2).aa()
        out = self.matmul(x, y)
        return out


def test_super_argument_not_self():
    x = Tensor(np.ones(shape=[1, 3]), mindspore.float32)
    y = Tensor(np.ones(shape=[3, 4]), mindspore.float32)
    net = ArgumentNotSelf()
    ret = net(x, y)
    print(ret)


class ArgumentType(nn.Cell):
    def __init__(self):
        super().__init__()
        self.matmul = P.MatMul()

    def construct(self, x, y):
        super(ArgumentType, self).aa()
        out = self.matmul(x, y)
        return out


def test_super_argument_type():
    x = Tensor(np.ones(shape=[1, 3]), mindspore.float32)
    y = Tensor(np.ones(shape=[3, 4]), mindspore.float32)
    net = ArgumentType()
    ret = net(x, y)
    print(ret)
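The three classes above deliberately misuse super(): a wrong number of arguments, a second argument that is not self, and a lookup of a method (aa) that no parent defines. For contrast, a minimal sketch of the accepted form, reusing the imports above and calling only what nn.Cell itself provides:

class ArgumentOk(nn.Cell):
    def __init__(self):
        # Accepted form: the first argument is the class itself, the second is self,
        # and the resolved attribute (__init__) exists on the parent nn.Cell.
        super(ArgumentOk, self).__init__()
        self.matmul = P.MatMul()

    def construct(self, x, y):
        return self.matmul(x, y)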
@@ -162,7 +162,7 @@ def test_class_member_not_defined():
    y = Tensor(np.zeros([3, 4, 5], np.int32))
    with pytest.raises(TypeError) as ex:
        net(x, y)
    assert "'self.x' was not defined in the class '__init__' function." in str(ex.value)
    assert "'self.x' should be initialized as a 'Parameter' in the '__init__' function" in str(ex.value)


def test_change_list_element():
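The replacement message points at the expected fix: the member read in construct has to be declared in __init__, as a Parameter when graph mode must resolve and assign it. A minimal sketch, with the shape and name chosen arbitrarily:

import numpy as np

import mindspore.nn as nn
from mindspore import Parameter
from mindspore.common.tensor import Tensor


class NetWithDefinedMember(nn.Cell):
    def __init__(self):
        super().__init__()
        # Declared in __init__, so graph mode can resolve self.x; shape and name are arbitrary.
        self.x = Parameter(Tensor(np.zeros([3, 4, 5], np.int32)), name="x")

    def construct(self, y):
        return self.x + y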
@@ -84,7 +84,7 @@ def test_net_with_raise():
    inp = Tensor(np.ones([1, 1, 32, 32]).astype(np.float32))
    with pytest.raises(RuntimeError) as err:
        _cell_graph_executor.compile(net, inp)
    assert "Unsupported syntax 'Raise'." in str(err.value)
    assert "Unsupported statement 'Raise'." in str(err.value)


class NetAddN(nn.Cell):
@@ -14,7 +14,7 @@
# ============================================================================
"""
test mindspore grammar constraints
1. funtion must have return statement
1. function must have return statement
2. raise statement can not be used
"""
# pylint: disable=R1705, R1710, W0223

@@ -60,7 +60,7 @@ def test_missing_return():
    z = Tensor(2, mstype.int32)
    with pytest.raises(TypeError) as er:
        net(x, y, z)
    assert "Missing return statement in bound method 'construct'" in str(er.value)
    assert "Function must has 'return' statement, but missing in bound method 'construct'" in str(er.value)


def test_nest_function_missing_return():

@@ -95,7 +95,7 @@ def test_nest_function_missing_return():
    z = Tensor(2, mstype.int32)
    with pytest.raises(TypeError) as er:
        net(x, y, z)
    assert "Missing return statement in function 'g'" in str(er.value)
    assert "Function must has 'return' statement, but missing in function 'g'" in str(er.value)


def test_raise_in_method():

@@ -118,7 +118,7 @@ def test_raise_in_method():
    z = Tensor(2, mstype.int32)
    with pytest.raises(RuntimeError) as er:
        net(x, y, z)
    assert "Unsupported syntax 'Raise'." in str(er.value)
    assert "Unsupported statement 'Raise'." in str(er.value)


def test_raise_in_nested_function():

@@ -145,7 +145,7 @@ def test_raise_in_nested_function():
    z = Tensor(2, mstype.int32)
    with pytest.raises(RuntimeError) as er:
        net(x, y, z)
    assert "Unsupported syntax 'Raise'." in str(er.value)
    assert "Unsupported statement 'Raise'." in str(er.value)


def test_nest_branch_with_return():

@@ -182,7 +182,7 @@ def test_any_with_no_return():
    net = NetAnyNoReturn()
    with pytest.raises(TypeError) as er:
        net(tensor)
    assert "Missing return statement in bound method 'construct'" in str(er.value)
    assert "Function must has 'return' statement, but missing in bound method 'construct'" in str(er.value)


def test_missing_construct():
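The constraints spelled out in the docstring above are what these hunks re-assert: every execution path of construct must return, and the raise statement is rejected. A minimal sketch of a construct that satisfies both:

import mindspore.nn as nn


class CompliantNet(nn.Cell):
    def construct(self, x, y, z):
        # Both branches return and no 'raise' statement is used,
        # so neither of the two constraints above is violated.
        if x > y:
            return x + z
        return y + z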
@@ -62,6 +62,6 @@ def test_list_comp():
    assert get_list_comp_4() == (36, 64, 100)
    with pytest.raises(TypeError) as ex:
        get_list_comp_5()
    assert "The `generators` supports one `comprehension` in ListComp/GeneratorExp" in str(ex.value)
    assert "The 'generators' supports 1 'comprehension' in ListComp/GeneratorExp" in str(ex.value)
    assert get_generator_exp_1() == (1, 2, 3, 4, 5)
    assert get_generator_exp_2() == (36, 64, 100)
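The assertion above encodes the rule that a comprehension compiled in graph mode may carry only a single for clause. A sketch of the two shapes, assuming get_list_comp_5 is written with a nested generator along these lines:

# Single 'for' clause: the shape graph mode accepts.
squares = [i * i for i in range(1, 6)]

# Two 'for' clauses in one comprehension: the shape the test expects to be rejected
# (assuming get_list_comp_5 looks roughly like this when compiled in graph mode).
pairs = [i * j for i in range(3) for j in range(3)]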
@@ -98,7 +98,7 @@ def test_net_inputs_including_str():
    assert "The inputs types of the outermost network support bool, int, float, tensor, " \
           "mstype.Number(mstype.bool, mstype.int, mstype.float, mstype.uint), " \
           "and tuple or list containing only these types, and dict whose values are these types, " \
           "but got 1th arg is ok" in str(err.value)
           "but the 1th arg type is <class 'str'>, value is 'ok'" in str(err.value)


def test_outermost_net_pass_parameter():

@@ -107,7 +107,8 @@ def test_outermost_net_pass_parameter():
    assert "The inputs types of the outermost network support bool, int, float, tensor, " \
           "mstype.Number(mstype.bool, mstype.int, mstype.float, mstype.uint), " \
           "and tuple or list containing only these types, and dict whose values are these types, " \
           "but got 1th arg is Parameter (name=weight, shape=(2, 2), dtype=Float32, requires_grad=True)" \
           "but the 1th arg type is <class 'mindspore.common.parameter.ParameterTensor'>, " \
           "value is 'Parameter (name=weight, shape=(2, 2), dtype=Float32, requires_grad=True)'" \
           in str(err.value)


@@ -117,7 +118,7 @@ def test_outermost_net_pass_tuple_including_parameter():
    assert "The inputs types of the outermost network support bool, int, float, tensor, " \
           "mstype.Number(mstype.bool, mstype.int, mstype.float, mstype.uint), " \
           "and tuple or list containing only these types, and dict whose values are these types, " \
           "but got 6th arg is (" in str(err.value)
           "but the 6th arg type is <class 'tuple'>, value is '(" in str(err.value)


def test_outermost_net_pass_list_including_parameter():

@@ -126,7 +127,7 @@ def test_outermost_net_pass_list_including_parameter():
    assert "The inputs types of the outermost network support bool, int, float, tensor, " \
           "mstype.Number(mstype.bool, mstype.int, mstype.float, mstype.uint), " \
           "and tuple or list containing only these types, and dict whose values are these types, " \
           "but got 4th arg is [" in str(err.value)
           "but the 4th arg type is <class 'list'>, value is '[" in str(err.value)


def test_grad_net_pass_dict_including_parameter():

@@ -135,4 +136,4 @@ def test_grad_net_pass_dict_including_parameter():
    assert "The inputs types of the outermost network support bool, int, float, tensor, " \
           "mstype.Number(mstype.bool, mstype.int, mstype.float, mstype.uint), " \
           "and tuple or list containing only these types, and dict whose values are these types, " \
           "but got 3th arg is {" in str(err.value)
           "but the 3th arg type is <class 'dict'>, value is '{" in str(err.value)
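These hunks only change the wording of the asserted message from a bare value to a type-plus-value form. A sketch of the pattern the tests follow, assuming a trivial network and assuming the check surfaces as a TypeError:

import pytest

import mindspore.nn as nn
from mindspore import context

context.set_context(mode=context.GRAPH_MODE)


class IdentityNet(nn.Cell):
    def construct(self, x):
        return x


def test_str_input_rejected_sketch():
    net = IdentityNet()
    # A plain str is not an accepted input type for the outermost network in graph mode.
    with pytest.raises(TypeError) as err:
        net("ok")
    assert "but the 1th arg type is <class 'str'>, value is 'ok'" in str(err.value)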
@@ -346,7 +346,7 @@ def test_call_unsupported_builtin_function_in_while():
    net = Net()
    with pytest.raises(NameError) as err:
        net(Tensor([1, 2, 3]), Tensor([4, 5, 6]))
    assert "The builtin function 'divmod' is not supported in graph mode" in str(err.value)
    assert "The builtin function 'divmod' of python is not supported in graph mode" in str(err.value)
    assert "tests/ut/python/pipeline/parse/test_use_undefined_name_or_unsupported_builtin_function.py(342)" in str(
        err.value)
    assert "ret = divmod(x, y)" in str(err.value)

@@ -368,7 +368,7 @@ def test_call_unsupported_builtin_function_in_if_in_for():
    net = Net()
    with pytest.raises(NameError) as err:
        net(Tensor([1, 2, 3], mstype.float32))
    assert "The builtin function 'divmod' is not supported in graph mode" in str(err.value)
    assert "The builtin function 'divmod' of python is not supported in graph mode" in str(err.value)
    assert "tests/ut/python/pipeline/parse/test_use_undefined_name_or_unsupported_builtin_function.py(364)" in \
        str(err.value)
    assert "x = divmod(x, i)" in str(err.value)
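The Net classes behind these two tests live in test_use_undefined_name_or_unsupported_builtin_function.py and are not part of this diff; a sketch of their likely shape, under that assumption: a Python builtin such as divmod has no graph-mode counterpart, so compiling the call fails with the NameError asserted above.

import mindspore.nn as nn


class DivmodNet(nn.Cell):
    def construct(self, x, y):
        # divmod is a Python builtin with no graph-mode implementation,
        # so compiling this construct is expected to raise a NameError.
        ret = divmod(x, y)
        return ret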