!23806 [feat] [assistant] [I48OFJ] add dynamic shape for abs operator

Merge pull request !23806 from 彭涛/op_abs
This commit is contained in:
i-robot 2021-09-30 03:34:46 +00:00 committed by Gitee
commit 078b867d83
3 changed files with 53 additions and 5 deletions

View File

@ -38,9 +38,17 @@ void ImpleAbs(void *origin, void *target, size_t size) {
}
}
abstract::ShapePtr AbsInferShape(const std::vector<AbstractBasePtr> &input_args) {
auto in_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[0]->GetShapeTrack())[kShape];
return std::make_shared<abstract::Shape>(in_shape);
// Infers the output shape of the Abs operator.
// Abs is element-wise, so the output shape is exactly the input tensor's
// shape as reported by BuildShape(); this also propagates dynamic-shape
// information unchanged (the purpose of this change per the commit title).
abstract::ShapePtr AbsInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
MS_EXCEPTION_IF_NULL(primitive);
auto prim_name = primitive->name();
// Abs takes exactly one input; reject any other arity up front.
(void)CheckAndConvertUtils::CheckInteger("input number", SizeToLong(input_args.size()), kEqual, 1, prim_name);
MS_EXCEPTION_IF_NULL(input_args[0]);
// The single argument must be an abstract tensor.
CheckAndConvertUtils::CheckArgs<abstract::AbstractTensor>(prim_name, input_args, 0);
auto x = input_args[0]->BuildShape();
MS_EXCEPTION_IF_NULL(x);
// Downcast to a concrete Shape; null here means the input had no
// tensor shape, which is a hard error for this op.
auto shape_element = x->cast<abstract::ShapePtr>();
MS_EXCEPTION_IF_NULL(shape_element);
return shape_element;
}
TypePtr AbsInferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
@ -55,7 +63,7 @@ AbstractBasePtr AbsInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr
const int64_t input_num = 1;
CheckAndConvertUtils::CheckInputArgs(input_args, kEqual, input_num, primitive->name());
return abstract::MakeAbstract(AbsInferShape(input_args), AbsInferType(primitive, input_args));
return abstract::MakeAbstract(AbsInferShape(primitive, input_args), AbsInferType(primitive, input_args));
}
ValuePtr AbsInferValue(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
@ -75,7 +83,7 @@ ValuePtr AbsInferValue(const PrimitivePtr &prim, const std::vector<AbstractBaseP
auto data_size = x_tensor->DataSize();
auto dtype = x_tensor->data_type();
auto shape = AbsInferShape(input_args);
auto shape = AbsInferShape(prim, input_args);
auto result_tensor = std::make_shared<tensor::Tensor>(dtype, shape->shape());
auto x_datac = x_tensor->data_c();
auto result_datac = result_tensor->data_c();

View File

@ -16,6 +16,7 @@
"""tbe ops"""
from .celu import _celu_tbe
from .abs import _abs_tbe
from .abs_ds import _abs_ds_tbe
from .inplace_add import _inplace_add_tbe
from .inplace_sub import _inplace_sub_tbe
from .abs_grad import _abs_grad_tbe

View File

@ -0,0 +1,39 @@
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Abs op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
# TBE registration metadata for the dynamic-shape ("ds") variant of Abs.
# It mirrors the static-shape registration in abs.py, with dynamic_shape(True)
# set so this kernel is selectable when input shapes are not fully known at
# graph-compile time. formatAgnostic lets any data format pass through, and
# the supported dtypes are fp16, fp32 and int32.
# NOTE(review): comments cannot be placed inside the backslash-continued
# chain, so the whole builder is documented here.
abs_op_info = TBERegOp("Abs") \
    .fusion_type("ELEMWISE") \
    .async_flag(False) \
    .binfile_name("abs.so") \
    .compute_cost(10) \
    .kernel_name("abs") \
    .partial_flag(True) \
    .dynamic_shape(True) \
    .op_pattern("formatAgnostic") \
    .input(0, "x", None, "required", None) \
    .output(0, "y", True, "required", "all") \
    .dtype_format(DataType.F16_None, DataType.F16_None) \
    .dtype_format(DataType.F32_None, DataType.F32_None) \
    .dtype_format(DataType.I32_None, DataType.I32_None) \
    .get_op_info()


@op_info_register(abs_op_info)
def _abs_ds_tbe():
    """Register the dynamic-shape Abs TBE op info; body is intentionally empty."""
    return