!25369 [feat] [assistant] [I48OBD] add dynamic shape for BesselI1e operator

Merge pull request !25369 from lyl116/BesselI1e
This commit is contained in:
i-robot 2021-11-30 08:49:11 +00:00 committed by Gitee
commit fce7c056c7
6 changed files with 141 additions and 7 deletions

View File

@ -297,6 +297,7 @@ inline const PrimitivePtr kPrimAsinh = std::make_shared<Primitive>("Asinh");
inline const PrimitivePtr kPrimAcosh = std::make_shared<Primitive>("Acosh");
inline const PrimitivePtr kPrimAtanh = std::make_shared<Primitive>("Atanh");
inline const PrimitivePtr kPrimApplyGradientDescent = std::make_shared<Primitive>("ApplyGradientDescent");
inline const PrimitivePtr kPrimBesselI1e = std::make_shared<Primitive>("BesselI1e");
inline const PrimitivePtr kPrimTanhGrad = std::make_shared<Primitive>("TanhGrad");
inline const PrimitivePtr kPrimPooling = std::make_shared<Primitive>("Pooling");
inline const PrimitivePtr kPrimPoolingGrad = std::make_shared<Primitive>("PoolingGrad");

View File

@ -0,0 +1,59 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ops/bessel_i1e.h"
#include <string>
#include <algorithm>
#include <map>
#include <set>
#include <vector>
#include "ops/op_utils.h"
#include "utils/check_convert_utils.h"
#include "abstract/primitive_infer_map.h"
namespace mindspore {
namespace ops {
namespace {
// Infers the output shape of BesselI1e. The op is element-wise, so the output
// shape is identical to the shape of the single tensor input "x".
abstract::ShapePtr BesselI1eInferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
  MS_EXCEPTION_IF_NULL(primitive);
  const auto &op_name = primitive->name();
  // Validate that argument 0 is an AbstractTensor before reading its shape.
  (void)CheckAndConvertUtils::CheckArgs<abstract::AbstractTensor>(op_name, input_args, 0);
  auto base_shape = input_args[0]->BuildShape();
  MS_EXCEPTION_IF_NULL(base_shape);
  auto shape_ptr = base_shape->cast<abstract::ShapePtr>();
  MS_EXCEPTION_IF_NULL(shape_ptr);
  return shape_ptr;
}
// Infers the output dtype of BesselI1e: the output has the same dtype as the
// input, which must be a tensor of one of the common valid numeric types.
TypePtr BesselI1eInferType(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
  MS_EXCEPTION_IF_NULL(primitive);
  auto prim_name = primitive->name();
  MS_EXCEPTION_IF_NULL(input_args[0]);
  auto x_type = input_args[0]->BuildType();
  // Report the input as "x" to match the name declared via InitIOName({"x"}, {"output"})
  // in ops/bessel_i1e.h; "input_x" would mislead users reading the error message.
  (void)CheckAndConvertUtils::CheckTensorTypeValid("x", x_type, common_valid_types, prim_name);
  return x_type;
}
} // namespace
// Frontend infer entry for BesselI1e: checks the argument count, then combines
// the inferred dtype and shape into the output abstract value.
AbstractBasePtr BesselI1eInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
                               const std::vector<AbstractBasePtr> &input_args) {
  MS_EXCEPTION_IF_NULL(primitive);
  constexpr int64_t kBesselI1eInputNum = 1;
  CheckAndConvertUtils::CheckInputArgs(input_args, kEqual, kBesselI1eInputNum, primitive->name());
  // Type is validated first, then shape — same order as the original checks.
  auto out_type = BesselI1eInferType(primitive, input_args);
  auto out_shape = BesselI1eInferShape(primitive, input_args);
  return abstract::MakeAbstract(out_shape, out_type);
}
REGISTER_PRIMITIVE_EVAL_IMPL(BesselI1e, prim::kPrimBesselI1e, BesselI1eInfer, nullptr, true);
} // namespace ops
} // namespace mindspore

View File

@ -0,0 +1,39 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CORE_OPS_BESSEL_I1E_H_
#define MINDSPORE_CORE_OPS_BESSEL_I1E_H_
#include <map>
#include <vector>
#include <string>
#include <memory>
#include "ops/primitive_c.h"
#include "abstract/abstract_value.h"
#include "utils/check_convert_utils.h"
namespace mindspore {
namespace ops {
constexpr auto kNameBesselI1e = "BesselI1e";
/// \brief BesselI1e primitive: computes the exponentially scaled modified
/// Bessel function of the first kind (order 1) of the input, element-wise.
/// One input ("x") and one output ("output"); shape/dtype inference lives in
/// ops/bessel_i1e.cc.
class BesselI1e : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the input/output names used by inference.
  BesselI1e() : PrimitiveC(kNameBesselI1e) { InitIOName({"x"}, {"output"}); }
  /// \brief Destructor.
  ~BesselI1e() = default;
  MS_DECLARE_PARENT(BesselI1e, PrimitiveC);
  /// \brief Init. The op has no attributes to initialize.
  void Init() {}
};
} // namespace ops
} // namespace mindspore
#endif // MINDSPORE_CORE_OPS_BESSEL_I1E_H_

View File

@ -332,6 +332,7 @@ from .scatter_add_ds import _scatter_add_ds_tbe
from .atan2 import _atan2_tbe
from .bessel_i0e import _bessel_i0e_tbe
from .bessel_i1e import _bessel_i1e_tbe
from .bessel_i1e_ds import _bessel_i1e_ds_tbe
from .batch_to_space_nd import _batch_to_space_nd_tbe
from .space_to_batch_nd import _space_to_batch_nd_tbe
from .bitwise_and import _bitwise_and_tbe

View File

@ -0,0 +1,38 @@
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""BesselI1e op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
# TBE registration info for the dynamic-shape ("ds") variant of BesselI1e.
# dynamic_shape(True) distinguishes this entry from the static-shape one in
# bessel_i1e.py. Element-wise kernel "bessel_i1e" (bessel_i1e.so); supported
# dtypes are float16 and float32 with format-agnostic layout.
# NOTE: no comments may be interleaved below — the backslash continuations
# would be broken by an intervening comment line.
bessel_i1e_op_info = TBERegOp("BesselI1e") \
    .fusion_type("ELEMWISE") \
    .async_flag(False) \
    .binfile_name("bessel_i1e.so") \
    .compute_cost(10) \
    .kernel_name("bessel_i1e") \
    .partial_flag(True) \
    .dynamic_shape(True) \
    .op_pattern("formatAgnostic") \
    .input(0, "x", False, "required", "all") \
    .output(0, "y", False, "required", "all") \
    .dtype_format(DataType.F16_None, DataType.F16_None) \
    .dtype_format(DataType.F32_None, DataType.F32_None) \
    .get_op_info()


# Registers the op info above; the function itself is only an import hook
# (see the `from .bessel_i1e_ds import _bessel_i1e_ds_tbe` line in __init__).
@op_info_register(bessel_i1e_op_info)
def _bessel_i1e_ds_tbe():
    """BesselI1e TBE register"""
    return

View File

@ -5041,7 +5041,7 @@ class BesselI0e(PrimitiveWithInfer):
return x
class BesselI1e(PrimitiveWithInfer):
class BesselI1e(Primitive):
r"""
Computes BesselI1e of input element-wise.
@ -5062,6 +5062,7 @@ class BesselI1e(PrimitiveWithInfer):
Raises:
TypeError: If `x` is not a Tensor.
TypeError: If dtype of `x` is not float16 or float32.
Supported Platforms:
``Ascend``
@ -5077,13 +5078,8 @@ class BesselI1e(PrimitiveWithInfer):
@prim_attr_register
def __init__(self):
"""Initialize BesselI1e"""
self.init_prim_io_names(inputs=['x'], outputs='output')
def infer_shape(self, x):
return x
def infer_dtype(self, x):
validator.check_tensor_dtype_valid('x', x, mstype.number_type, self.name)
return x
class Inv(PrimitiveWithInfer):