add import of ops in ops __init__

Wei Luning 2020-09-11 11:16:14 +08:00
parent cdb4fb815a
commit 7b6899fdc0
9 changed files with 93 additions and 21 deletions

View File

@@ -297,8 +297,7 @@ AnfNodePtr BuildNewCNode(const FuncGraphPtr &func_graph, const std::string &func
       // If sig is SignatureEnumRW::kRWRef, not do anything.
     } else if (sig == SignatureEnumRW::kRWWrite &&
                !((type->type_id() == kObjectTypeRef) || (type->type_id() == kObjectTypeRefKey))) {
-      MS_EXCEPTION(TypeError) << "Function " << func_name << "'s input " << i << " should be a Parameter, but "
-                              << type->ToString();
+      RaiseExceptionForCheckParameter(func_name, i, type->ToString());
     }
     MS_LOG(DEBUG) << "Function " << func_name << "'s input " << i << " " << param->DebugString(2) << " abs "
                   << args_spec_list[i]->ToString() << " type " << type->ToString();
@@ -338,5 +337,9 @@ void RaiseExceptionForConvertRefDtype(const std::string &func_name, const std::s
                           << "'. The writable arg type is not equal to the largest type, "
                           << "so can not cast automatically.";
 }
+void RaiseExceptionForCheckParameter(const std::string &func_name, int i, const std::string &source_type) {
+  MS_EXCEPTION(TypeError) << "Function " << func_name << "'s input " << i << " should be a Parameter, but "
+                          << source_type << ".";
+}
 }  // namespace prim
 }  // namespace mindspore
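
The helper centralizes the message so that graph mode and pynative can share it; its user-visible effect is the TypeError exercised by the tests at the end of this commit. A minimal sketch (illustrative, mirroring test_assign_check_in_sig below; not part of the diff):

import mindspore as ms
from mindspore import Tensor
from mindspore.ops import functional as F

# assign's first input carries a kRWWrite signature; a plain Tensor is not a
# Parameter, so RaiseExceptionForCheckParameter raises a TypeError.
F.assign(Tensor(2, ms.int8), Tensor(3, ms.int8))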

View File

@@ -58,8 +58,10 @@ using RWSignaturePtr = std::shared_ptr<DoSignatureMetaFuncGraph>;
 extern const std::map<TypeId, size_t> type_map;
 
 // shared with pynative
 void RaiseExceptionForConvertRefDtype(const std::string &func_name, const std::string &ref_type,
                                       const std::string &target_type);
+void RaiseExceptionForCheckParameter(const std::string &func_name, int i, const std::string &source_type);
+
 AnfNodePtr GenerateCNode(const FuncGraphPtr &func_graph, const std::string &func_name, const ValuePtr &function,
                          const AbstractBasePtrList &args_spec_list, const AnfNodePtrList &old_node_inputs);

View File

@@ -279,16 +279,34 @@ void ConvertInputs(const PrimitivePyPtr &prim, const py::list &args, const OpExe
     }
     auto obj = out_args[i];
-    if (py::isinstance<tensor::Tensor>(obj)) {
-      auto arg = py::cast<tensor::TensorPtr>(obj);
-      TypeId arg_type_id = arg->data_type();
-      if (prim::type_map.find(arg_type_id) == prim::type_map.end() || arg_type_id == it->second) {
-        continue;
+    auto sig = signature[i].rw;
+    bool is_parameter = false;
+    bool is_same_type = false;
+    TypeId arg_type_id = kTypeUnknown;
+    bool is_sig_write = (sig == SignatureEnumRW::kRWWrite);
+    if (py::isinstance<tensor::MetaTensor>(obj)) {
+      auto arg = py::cast<tensor::MetaTensorPtr>(obj);
+      if (arg->is_parameter()) {
+        is_parameter = true;
       }
-      if (signature[i].rw == SignatureEnumRW::kRWWrite) {
-        prim::RaiseExceptionForConvertRefDtype(prim->name(), TypeIdToMsTypeStr(arg_type_id),
-                                               TypeIdToMsTypeStr(it->second));
+      arg_type_id = arg->data_type();
+    }
+    if (arg_type_id != 0) {
+      is_same_type = (prim::type_map.find(arg_type_id) == prim::type_map.end() || arg_type_id == it->second);
+    }
+    if (is_sig_write) {
+      if (!is_parameter) {
+        prim::RaiseExceptionForCheckParameter(prim->name(), i, "not");
+      }
+      if (arg_type_id != 0) {
+        if (!is_same_type) {
+          prim::RaiseExceptionForConvertRefDtype(prim->name(), TypeIdToMsTypeStr(arg_type_id),
+                                                 TypeIdToMsTypeStr(it->second));
+        }
       }
     }
+    if (is_same_type) {
+      continue;
+    }
     if (!py::isinstance<tensor::Tensor>(obj) && !py::isinstance<py::int_>(obj) && !py::isinstance<py::float_>(obj)) {
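
Net effect: ConvertInputs now records three facts per input (is it a parameter, its dtype if known, and whether that dtype already matches the target) before deciding to raise or to skip conversion. A pynative-mode sketch of the check (illustrative, assuming the Assign primitive's kRWWrite first input; not part of the diff):

import mindspore as ms
from mindspore import Tensor, Parameter, context
from mindspore.ops import operations as P

context.set_context(mode=context.PYNATIVE_MODE)
assign = P.Assign()
p = Parameter(Tensor(1, ms.int8), name="p")
assign(p, Tensor(2, ms.int8))                   # ok: Parameter, dtype already matches
assign(Tensor(1, ms.int8), Tensor(2, ms.int8))  # TypeError via RaiseExceptionForCheckParameter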

View File

@@ -91,7 +91,11 @@ void MetaTensor::SetDeviceInfo(const std::string &format, const TypePtr &data_ty
 std::string MetaTensor::ToString() const {
   std::ostringstream buf;
-  buf << "MetaTensor shape:[" << shape() << "]";
+  buf << "MetaTensor(shape=[" << shape() << "]";
+  if (is_parameter_) {
+    buf << ", name=" << param_info_->name();
+  }
+  buf << ")";
   return buf.str();
 }
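
For illustration (assumed rendering of the shape vector): a parameter now prints as MetaTensor(shape=[2 2], name=cov_step), and a plain meta tensor as MetaTensor(shape=[2 2]), replacing the old MetaTensor shape:[2 2].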

View File

@@ -525,10 +525,14 @@ std::string Tensor::ToStringInternal(int limit_size) const {
   buf << "Tensor(shape=" << ShapeToString(shape_) << ", dtype=" << dtype->ToString() << ',';
   if (limit_size <= 0 || DataSize() < limit_size) {
     // Only print data for small tensor.
-    buf << ((data().ndim() > 1) ? '\n' : ' ') << data().ToString(data_type_, shape_, false) << ')';
+    buf << ((data().ndim() > 1) ? '\n' : ' ') << data().ToString(data_type_, shape_, false);
   } else {
-    buf << " [...])";
+    buf << " [...]";
   }
+  if (is_parameter_) {
+    buf << ", name=" << param_info_->name();
+  }
+  buf << ")";
   return buf.str();
 }
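
Both branches used to append the closing parenthesis themselves; a single closing ')' now lands after the optional name field, so a large parameter tensor prints as, e.g. (illustrative rendering): Tensor(shape=(2, 2), dtype=Float32, [...], name=w).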

View File

@@ -36,6 +36,9 @@ from .op_info_register import op_info_register, AkgGpuRegOp, AkgAscendRegOp, AiC
 from .primitive import constexpr
 from . import composite, operations, functional
 from . import signature
+from .composite import *
+from .operations import *
+from .functional import *
 
 __primitive__ = [
     "prim_attr_register", "Primitive", "PrimitiveWithInfer", "signature"

View File

@@ -18,10 +18,10 @@ import functools
 from .. import signature as sig
 from ..._checkparam import Validator as validator, Rel
 from ...common import dtype as mstype
-from ..primitive import Primitive, PrimitiveWithInfer, prim_attr_register
+from ..primitive import Primitive, PrimitiveWithCheck, PrimitiveWithInfer, prim_attr_register
 
 
-class Assign(Primitive):
+class Assign(PrimitiveWithCheck):
     """
     Assign `Parameter` with a value.
@@ -59,14 +59,10 @@ class Assign(Primitive):
     def __init__(self):
         self.init_prim_io_names(inputs=['ref', 'value'], outputs=['output'])
 
-    def infer_shape(self, variable, value):
-        return variable
-
-    def infer_dtype(self, variable, value):
+    def check_dtype(self, variable, value):
         if variable != mstype.type_refkey:
             validator.check_tensor_type_same({"variable": variable}, mstype.number_type, self.name)
         validator.check_scalar_or_tensor_type_same({"value": value}, mstype.number_type, self.name)
-        return variable
 
 
 class BoundingBoxEncode(PrimitiveWithInfer):
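
A minimal usage sketch under the new base class (illustrative, not part of the diff): with PrimitiveWithCheck, check_dtype only validates and returns nothing, and the output simply follows the variable input, so the infer_shape/infer_dtype pair becomes unnecessary.

import mindspore as ms
from mindspore import Tensor, Parameter, context
from mindspore.ops import operations as P

context.set_context(mode=context.PYNATIVE_MODE)
assign = P.Assign()
p = Parameter(Tensor(2.0, ms.float32), name="p")
out = assign(p, Tensor(1.0, ms.float32))  # output mirrors the variable's dtype/shape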

View File

@@ -306,9 +306,28 @@ class Assign(nn.Cell):
         self.cov_step = self.cov_step + x
         return self.cov_step
 
 
 def test_assign():
     context.set_context(mode=context.GRAPH_MODE)
     net = Assign()
     input_data = ms.Tensor(np.array(1).astype(np.int32))
     net_back = GradNet(net)
     net_back(input_data)
+
+
+class AssignCheck(nn.Cell):
+    """ NetWithNDarray definition """
+
+    def __init__(self):
+        super(AssignCheck, self).__init__()
+        self.cov_step = ms.Parameter(0.0, name="cov_step", requires_grad=False)
+
+    def construct(self, x):
+        self.cov_step = x
+        return self.cov_step
+
+
+def test_assign_check_none():
+    context.set_context(mode=context.GRAPH_MODE)
+    net = AssignCheck()
+    with pytest.raises(TypeError):
+        net(None)

View File

@@ -15,9 +15,11 @@
 """ test implicit conversion """
 import numpy as np
+import pytest
 
+import mindspore as ms
-from mindspore import Tensor, nn
+from mindspore import Tensor, nn, Parameter
 from mindspore.ops import composite as C
+from mindspore.ops import functional as F
 
 grad_all_with_sens = C.GradOperation(get_all=True, sens_param=True)
@@ -263,3 +265,24 @@ def test_int8_tensor_and_uint8_tensors_add_grad():
     assert ret[1].dtype == y.dtype
     assert (ret[0].asnumpy() == sens.asnumpy()).all()
     assert (ret[1].asnumpy() == sens.asnumpy()).all()
+
+
+class AssignCheck(nn.Cell):
+    """ NetWithNDarray definition """
+
+    def __init__(self):
+        super(AssignCheck, self).__init__()
+        self.cov_step = Parameter(0.0, name="cov_step", requires_grad=False)
+
+    def construct(self, x, y):
+        F.assign(self.cov_step, y)
+        F.assign(x, y)
+        return x
+
+
+def test_assign_check_in_sig():
+    net = AssignCheck()
+    x = Tensor(2, ms.int8)
+    y = Tensor(3, ms.uint8)
+    with pytest.raises(TypeError) as e:
+        net(x, y)
+    assert "Parameter" in e.value.args[0]