support `not in` and add a check for grad_with_sens when no sens is provided.

buxue 2020-09-16 15:49:43 +08:00
parent 9c3a494ea7
commit 458498900c
22 changed files with 187 additions and 26 deletions

View File

@ -107,7 +107,7 @@ convert_object_map = {
T.is_: F.is_,
T.is_not: F.is_not,
T.contains: multitype_ops.in_,
T.not_contains: F.not_in_dict,
T.not_contains: multitype_ops.not_in_,
# system function
T.len: M.ms_len,
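
With this mapping, the Python `not in` operator inside graph-mode code dispatches to the new multitype op, covering tuples and lists as well as dictionaries. A minimal usage sketch (the cell below is illustrative, not part of this commit):

    import mindspore.nn as nn
    from mindspore import context

    context.set_context(mode=context.GRAPH_MODE)

    class NotInNet(nn.Cell):
        """Illustrative cell: `not in` now resolves to multitype_ops.not_in_."""
        def __init__(self):
            super(NotInNet, self).__init__()
            self.value = 5

        def construct(self):
            # Previously T.not_contains mapped to F.not_in_dict (dicts only);
            # the multitype op also handles numbers/strings in tuples and lists.
            return self.value not in (1, 2, 3)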

View File

@ -641,7 +641,7 @@ void GradOperation::doGetGrad(const FuncGraphPtr &func_graph, AnfNodePtr out, An
// Generate the graph.
FuncGraphPtr GradOperation::GenerateFuncGraph(const AbstractBasePtrList &args_spec_list) {
if (args_spec_list.size() < 1) {
if (args_spec_list.empty()) {
MS_LOG(EXCEPTION) << "GenerateGraph requires at least 1 parameters, while the input size is "
<< args_spec_list.size() << ".";
}
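
For context, a sketch of the user-visible call shape this guards (assuming the GradOperation signature of this era; `net` and `x` are placeholders):

    from mindspore.ops import composite as C

    grad_op = C.GradOperation()  # the meta operation
    grad_fn = grad_op(net)       # needs at least one argument: the fn/cell to differentiate
    dx = grad_fn(x)              # gradients w.r.t. the inputs of `net`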

View File

@ -138,7 +138,7 @@ EvalResultPtr UnpackGraphEvaluator::Run(AnalysisEnginePtr engine, const ConfigPt
auto unpack_graph = prim_->cast<prim::UnpackGraphPrimitivePtr>();
auto out_node = out_conf->node()->cast<CNodePtr>();
const auto &out_node_inputs = out_node->inputs();
if (out_node->inputs().size() == 0 || (out_node_inputs.size() - 1) != args_conf_list.size()) {
if (out_node->inputs().empty() || (out_node_inputs.size() - 1) != args_conf_list.size()) {
MS_LOG(EXCEPTION) << "UnpackGraphPrimitive"
<< " args size should equal to inputs size minus 1, but args size " << args_conf_list.size()
<< ", inputs size " << out_node_inputs.size();
@ -149,7 +149,7 @@ EvalResultPtr UnpackGraphEvaluator::Run(AnalysisEnginePtr engine, const ConfigPt
[](const ConfigPtr &ref) -> AbstractBasePtr { return ref->GetEvaluatedValue()->abstract(); });
// get the forward graph
MS_EXCEPTION_IF_NULL(args_spec_list[0]);
AbstractFunctionPtr fn = args_spec_list[0]->cast<AbstractFunctionPtr>();
auto fn = args_spec_list[0]->cast<AbstractFunctionPtr>();
if (fn == nullptr) {
MS_LOG(EXCEPTION) << "UnpackGraphPrimitive arg0 must be AbstractFunction, but " << args_spec_list[0]->ToString();
}
@ -161,6 +161,9 @@ EvalResultPtr UnpackGraphEvaluator::Run(AnalysisEnginePtr engine, const ConfigPt
GetUnpackGraphSpecArgsList(args_spec_list, unpack_graph->need_unpack_args());
AbstractBasePtrList graph_specialize_args_without_sens;
if (unpack_graph->with_sens_in_args() && graph_specialize_args.empty()) {
MS_EXCEPTION(ValueError) << "Grad with sens, but no sens is provided.";
}
(void)std::transform(graph_specialize_args.begin(),
graph_specialize_args.end() - (unpack_graph->with_sens_in_args() ? 1 : 0),
std::back_inserter(graph_specialize_args_without_sens), [](AbstractBasePtr abs) { return abs; });
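
A sketch of the failure mode the new check now reports eagerly (GradOperation signature of this era assumed; the zero-input call is illustrative): a grad operation built with sens_param=True whose call supplies no arguments at all, so there is no sens to strip.

    import mindspore.nn as nn
    from mindspore import context
    from mindspore.ops import composite as C

    context.set_context(mode=context.GRAPH_MODE)

    class GradWithSens(nn.Cell):
        def __init__(self, net):
            super(GradWithSens, self).__init__()
            self.grad = C.GradOperation(sens_param=True)
            self.net = net

        def construct(self):
            # Intentional bug: sens_param=True but nothing is passed, so
            # with_sens_in_args() holds while graph_specialize_args is empty;
            # this now raises "Grad with sens, but no sens is provided."
            return self.grad(self.net)()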
@ -177,8 +180,8 @@ EvalResultPtr UnpackGraphEvaluator::Run(AnalysisEnginePtr engine, const ConfigPt
return engine->ForwardConfig(out_conf, fn_conf);
}
AnfNodePtr MixedPrecisionCastHelper(AnfNodePtr source_node, AbstractBasePtr node_type, AnfNodePtr target_type,
FuncGraphPtr func_graph) {
AnfNodePtr MixedPrecisionCastHelper(const AnfNodePtr &source_node, const AbstractBasePtr &node_type,
const AnfNodePtr &target_type, const FuncGraphPtr &func_graph) {
AnfNodePtr target_node = source_node;
if (node_type->isa<AbstractTensor>()) {
auto x = node_type->cast<AbstractTensorPtr>();

View File

@ -27,12 +27,12 @@ from mindspore._checkparam import check_int_positive
from mindspore.ops import _selected_ops
from ..cell import Cell
__all__ = ['BatchNorm1d', 'BatchNorm2d', 'LayerNorm', 'GroupNorm', 'GlobalBatchNorm']
class _BatchNorm(Cell):
"""Batch Normalization base class."""
@cell_attr_register
def __init__(self,
num_features,
@ -132,7 +132,6 @@ class _BatchNorm(Cell):
group_list = [list(i) for i in world_rank_list]
return group_list
def _global_sync(self, x, axes, re_shape):
"""calculate global batch normalization output"""
x_mean = self.reduce_mean(x, axes)

View File

@ -38,6 +38,7 @@ from .logical_or_impl import logical_or
from .logic_not_impl import logical_not
from .uadd_impl import uadd
from .in_impl import in_
from .not_in_impl import not_in_
__all__ = [
'add',
'sub',
@ -61,5 +62,6 @@ __all__ = [
'logical_and',
'logical_or',
'logical_not',
'in_'
'in_',
'not_in_'
]

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""add_impl"""
"""Implementation for internal polymorphism `add` operations."""
from ...composite import base
from ... import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""div_impl"""
"""Implementation for internal polymorphism `div` operations."""
from ...composite import base
from ... import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""equal_impl"""
"""Implementation for internal polymorphism `equal` operations."""
from . import _constexpr_utils as const_utils
from ...composite import base
from ... import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""Implementation for getitem."""
"""Implementation for internal polymorphism `getitem` operations."""
from . import _compile_utils as compile_utils
from .. import base
from ... import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""greater_equal_impl"""
"""Implementation for internal polymorphism `greater_equal` operations."""
from mindspore.ops.composite import base
from mindspore.ops import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""equal_impl"""
"""Implementation for internal polymorphism `greater` operations."""
from mindspore.ops.composite import base
from mindspore.ops import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""in_impl"""
"""Implementation for internal polymorphism `in` operations."""
from . import _constexpr_utils as const_utils
from ... import functional as F
@ -21,7 +21,7 @@ from ...composite import base
in_ = base.MultitypeFuncGraph("in", True)
"""
in_ is a metafuncgraph object which will determine if a in b
"in_" is a multi type func graph object which will determine if a in b
using ".register" decorator
"""

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""less_equal_impl"""
"""Implementation for internal polymorphism `less_equal` operations."""
from mindspore.ops.composite import base
from mindspore.ops import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""equal_impl"""
"""Implementation for internal polymorphism `less` operations."""
from mindspore.ops.composite import base
from mindspore.ops import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""logical_not_impl"""
"""Implementation for internal polymorphism `logical not` operations."""
from mindspore.ops.composite import base
from mindspore.ops import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""logical_and_impl"""
"""Implementation for internal polymorphism `logical and` operations."""
from mindspore.ops.composite import base
from mindspore.ops import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""logical_or_impl"""
"""Implementation for internal polymorphism `logical or` operations."""
from mindspore.ops.composite import base
from mindspore.ops import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""not_equal_impl"""
"""Implementation for internal polymorphism `not equal` operations."""
from ...composite import base
from ... import functional as F

View File

@ -0,0 +1,101 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Implementation for internal polymorphism `not in` operations."""
from . import _constexpr_utils as const_utils
from ... import functional as F
from ...composite import base
not_in_ = base.MultitypeFuncGraph("not_in", True)
"""
"not_in_" is a multi type func graph object which will determine if a not in b.
using ".register" decorator
"""
@not_in_.register("Number", "Tuple")
def _number_not_in_tuple(x, y):
"""
Determine if a number not in tuple.
Args:
x (Number): x
y (tuple): y
Returns:
bool, if x not in y return true, x in y return false.
"""
return not const_utils.scalar_in_sequence(x, y)
@not_in_.register("Number", "List")
def _number_not_in_list(x, y):
"""
Determine if a number not in list.
Args:
x (Number): x
y (list): y
Returns:
bool, if x not in y return true, x in y return false.
"""
return not const_utils.scalar_in_sequence(x, y)
@not_in_.register("String", "Tuple")
def _string_not_in_tuple(x, y):
"""
Determine if a str not in a tuple.
Args:
x (str): x
y (tuple): y
Returns:
bool, if x not in y return true, x in y return false.
"""
return not const_utils.scalar_in_sequence(x, y)
@not_in_.register("String", "List")
def _string_not_in_list(x, y):
"""
Determine if a str not in a list.
Args:
x (str): x
y (list): y
Returns:
bool, if x not in y return true, x in y return false.
"""
return not const_utils.scalar_in_sequence(x, y)
@not_in_.register("String", "Dictionary")
def _str_not_in_dict(x, y):
"""
Determine if a str not in dict.
Args:
x: str
y: dict
Returns:
bool, if x not in y return true, x in y return false.
"""
return F.not_in_dict(x, y)
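
The file follows the usual MultitypeFuncGraph pattern: one callable name, with one overload registered per argument-type signature. A stripped-down sketch of that pattern (the op name and body here are hypothetical):

    from mindspore.ops.composite import base

    # Hypothetical multitype op, built the same way as not_in_:
    pair_op = base.MultitypeFuncGraph("pair_op", True)

    @pair_op.register("Number", "Number")
    def _pair_op_scalars(x, y):
        # The framework dispatches to this overload when both call-site
        # arguments are inferred as Number.
        return x + y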

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""Implementation for setitem."""
"""Implementation for internal polymorphism `setitem` operations."""
from . import _compile_utils as compile_utils
from ... import functional as F

View File

@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""uadd_impl"""
"""Implementation for internal polymorphism `uadd` operations."""
from mindspore.ops.composite import base
# uadd is a metagraph object which returns the operation result for its input

View File

@ -0,0 +1,56 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test not in"""
import numpy as np
import mindspore.nn as nn
from mindspore import context, Tensor
context.set_context(mode=context.GRAPH_MODE)
def test_number_not_in_tuple():
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.tuple_ = (2, 3, 4)
self.list_ = [2, 3, 4]
self.dict_ = {"a": Tensor(np.ones([1, 2, 3], np.int32)),
"b": Tensor(np.ones([1, 2, 3], np.int32)),
"c": Tensor(np.ones([1, 2, 3], np.int32))}
self.number_in = 3
self.number_not_in = 5
self.str_in = "a"
self.str_not_in = "e"
def construct(self):
ret = 0
if self.number_in not in self.tuple_:
ret += 1
if self.number_not_in not in self.tuple_:
ret += 1
if self.number_in not in self.list_:
ret += 3
if self.number_not_in not in self.list_:
ret += 3
if self.str_in not in self.dict_:
ret += 5
if self.str_not_in not in self.dict_:
ret += 5
return ret
net = Net()
output = net()
assert output == 9