!26398 [feat] [assistant] [I48OBC] add new LogicalNot

Merge pull request !26398 from 桂胜楠/logicalnot
i-robot 2021-12-27 02:54:53 +00:00 committed by Gitee
commit c13f31861c
6 changed files with 68 additions and 28 deletions

View File

@@ -1,5 +1,5 @@
 /**
- * Copyright 2020 Huawei Technologies Co., Ltd
+ * Copyright 2020-2021 Huawei Technologies Co., Ltd
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,19 +14,26 @@
  * limitations under the License.
  */
-#include "ops/logical_not.h"
+#include <map>
+#include <string>
 #include <set>
+#include <vector>
+#include <memory>
+#include "ops/logical_not.h"
 #include "ops/op_utils.h"
+#include "utils/check_convert_utils.h"
 namespace mindspore {
 namespace ops {
 namespace {
 abstract::ShapePtr LogicalNotInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
   MS_EXCEPTION_IF_NULL(primitive);
-  auto in_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[0]->BuildShape())[kShape];
-  return std::make_shared<abstract::Shape>(in_shape);
+  auto shape_map = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[0]->BuildShape());
+  auto in_shape = shape_map[kShape];
+  auto min_shape = shape_map[kMinShape];
+  auto max_shape = shape_map[kMaxShape];
+  return std::make_shared<abstract::Shape>(in_shape, min_shape, max_shape);
 }
 TypePtr LogicalNotInferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
@@ -35,14 +42,21 @@ TypePtr LogicalNotInferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
   MS_EXCEPTION_IF_NULL(input_args[0]);
   auto infer_dtype = input_args[0]->BuildType();
   std::set<TypePtr> local_bool = {kBool};
-  return CheckAndConvertUtils::CheckTensorTypeValid("x", infer_dtype, local_bool, op_name);
+  (void)CheckAndConvertUtils::CheckTensorTypeValid("x", infer_dtype, local_bool, op_name);
+  return infer_dtype;
 }
 }  // namespace
 AbstractBasePtr LogicalNotInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
                                 const std::vector<AbstractBasePtr> &input_args) {
-  return std::make_shared<abstract::AbstractTensor>(LogicalNotInferType(primitive, input_args),
-                                                    LogicalNotInferShape(primitive, input_args)->shape());
+  MS_EXCEPTION_IF_NULL(primitive);
+  const int64_t input_num = 1;
+  CheckAndConvertUtils::CheckInputArgs(input_args, kGreaterEqual, input_num, primitive->name());
+  auto infer_type = LogicalNotInferType(primitive, input_args);
+  auto infer_shape = LogicalNotInferShape(primitive, input_args);
+  return abstract::MakeAbstract(infer_shape, infer_type);
 }
-REGISTER_PRIMITIVE_C(kNameLogicalNot, LogicalNot);
+REGISTER_PRIMITIVE_EVAL_IMPL(LogicalNot, prim::kPrimLogicalNot, LogicalNotInfer, nullptr, true);
 }  // namespace ops
 }  // namespace mindspore
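
For orientation, a minimal usage sketch of the primitive these infer functions back, assuming a standard MindSpore install (this mirrors the documented LogicalNot behavior and is not part of the diff):

import numpy as np
import mindspore as ms
from mindspore import Tensor, ops

# LogicalNot negates a bool tensor element-wise; the output shape and dtype
# are produced by the C++ LogicalNotInferShape/LogicalNotInferType above.
x = Tensor(np.array([True, False, True]), ms.bool_)
logical_not = ops.LogicalNot()
print(logical_not(x))  # [False  True False]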

View File

@@ -1,5 +1,5 @@
 /**
- * Copyright 2020 Huawei Technologies Co., Ltd
+ * Copyright 2020-2021 Huawei Technologies Co., Ltd
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -37,9 +37,9 @@ class MS_CORE_API LogicalNot : public PrimitiveC {
   /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.LogicalNot for the inputs.
   void Init() {}
 };
 AbstractBasePtr LogicalNotInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
                                 const std::vector<AbstractBasePtr> &input_args);
+using kPrimLogicalNotPtr = std::shared_ptr<LogicalNot>;
 }  // namespace ops
 }  // namespace mindspore

View File

@@ -203,6 +203,7 @@ from .less_equal import _less_equal_tbe
 from .less_equal_ds import _less_equal_ds_tbe
 from .logical_and import _logical_and_tbe
 from .logical_not import _logical_not_tbe
+from .logical_not_ds import _logical_not_ds_tbe
 from .logical_or import _logical_or_tbe
 from .logical_or_ds import _logical_or_ds_tbe
 from .reduce_max import _reduce_max_tbe

View File

@@ -0,0 +1,37 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""LogicalNot op"""
+from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
+
+logical_not_op_info = TBERegOp("LogicalNot") \
+    .fusion_type("ELEMWISE") \
+    .async_flag(False) \
+    .binfile_name("logical_not.so") \
+    .compute_cost(10) \
+    .kernel_name("logical_not") \
+    .dynamic_shape(True) \
+    .partial_flag(True) \
+    .input(0, "x", False, "required", "all") \
+    .output(0, "y", True, "required", "all") \
+    .op_pattern("formatAgnostic") \
+    .dtype_format(DataType.BOOL_None, DataType.BOOL_None) \
+    .get_op_info()
+
+
+@op_info_register(logical_not_op_info)
+def _logical_not_ds_tbe():
+    """LogicalNot TBE register"""
+    return

View File

@@ -3855,7 +3855,7 @@ class LessEqual(_LogicBinaryOp):
     """
 
 
-class LogicalNot(PrimitiveWithInfer):
+class LogicalNot(Primitive):
     """
     Computes the "logical NOT" of a tensor element-wise.
@@ -3889,18 +3889,6 @@ class LogicalNot(PrimitiveWithInfer):
         """Initialize LogicalNot"""
         self.init_prim_io_names(inputs=['x'], outputs=['output'])
 
-    def infer_shape(self, x_shape):
-        return x_shape
-
-    def infer_dtype(self, x_dtype):
-        validator.check_tensor_dtype_valid("x", x_dtype, [mstype.bool_], self.name + " or '~' operator")
-        return mstype.tensor_type(mstype.bool_)
-
-    def infer_value(self, x):
-        if x is not None:
-            x = x.asnumpy()
-            return Tensor(np.logical_not(x))
-        return None
 
 
 class LogicalAnd(_LogicBinaryOp):
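
With infer_shape/infer_dtype/infer_value removed, type checking and constant folding for LogicalNot no longer happen in Python; the C++ LogicalNotInfer registered above takes over. A short sketch of the unchanged user-facing behavior (the `~` dispatch is an assumption inferred from the old "or '~' operator" error text, not confirmed by this diff):

import numpy as np
import mindspore as ms
from mindspore import Tensor, ops

x = Tensor(np.array([True, False]), ms.bool_)
print(ops.LogicalNot()(x))  # [False  True]
# assumption: on a bool Tensor, ~x dispatches to the same LogicalNot primitive
print(~x)                   # [False  True]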

View File

@@ -53,11 +53,11 @@ def test_invert_int_tensor():
     context.set_context(mode=context.PYNATIVE_MODE)
     with pytest.raises(TypeError) as err:
         net(input_x)
-    assert "For 'LogicalNot or '~' operator', the type of 'x' should be Tensor[Bool], " \
-           "but got Tensor[Int32]" in str(err.value)
+    assert "For primitive[LogicalNot], the input argument[x] must be a type of { Tensor[Bool],}, " \
+           "but got Int32." in str(err.value)
     context.set_context(mode=context.GRAPH_MODE)
     with pytest.raises(TypeError) as err:
         net(input_x)
-    assert "For 'LogicalNot or '~' operator', the type of 'x' should be Tensor[Bool], " \
-           "but got Tensor[Int32]" in str(err.value)
+    assert "For primitive[LogicalNot], the input argument[x] must be a type of { Tensor[Bool],}, " \
+           "but got Int32." in str(err.value)