!25317 [feat] [assistant] [I48OAB] add dynamic shape for Softsign operator

Merge pull request !25317 from 路雄博/softsign_dynamic
i-robot 2022-02-22 03:46:55 +00:00 committed by Gitee
commit 36c6b30b3f
6 changed files with 141 additions and 8 deletions

View File

@@ -373,6 +373,7 @@ MS_CORE_API inline const PrimitivePtr kPrimAudioSpectrogram = std::make_shared<P
 MS_CORE_API inline const PrimitivePtr kPrimFlatten = std::make_shared<Primitive>("Flatten");
 MS_CORE_API inline const PrimitivePtr kPrimCrop = std::make_shared<Primitive>("Crop");
 MS_CORE_API inline const PrimitivePtr kPrimFlattenGrad = std::make_shared<Primitive>("FlattenGrad");
+MS_CORE_API inline const PrimitivePtr kPrimSoftsign = std::make_shared<Primitive>("Softsign");
 MS_CORE_API inline const PrimitivePtr kPrimSoftmax = std::make_shared<Primitive>("Softmax");
 MS_CORE_API inline const PrimitivePtr kPrimSparseSoftmaxCrossEntropy =
   std::make_shared<Primitive>("SparseSoftmaxCrossEntropy");

View File

@@ -0,0 +1,60 @@
/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "ops/softsign.h"
#include <string>
#include <algorithm>
#include <map>
#include <set>
#include <vector>
#include "ops/op_utils.h"
#include "utils/check_convert_utils.h"
#include "abstract/primitive_infer_map.h"

namespace mindspore {
namespace ops {
namespace {
abstract::ShapePtr InferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
  MS_EXCEPTION_IF_NULL(primitive);
  auto prim_name = primitive->name();
  (void)CheckAndConvertUtils::CheckInteger("input numbers", SizeToLong(input_args.size()), kGreaterEqual, 1, prim_name);
  (void)CheckAndConvertUtils::CheckArgs<abstract::AbstractTensor>(prim_name, input_args, 0);
  auto x = input_args[0]->BuildShape();
  MS_EXCEPTION_IF_NULL(x);
  auto shape_element = x->cast<abstract::ShapePtr>();
  MS_EXCEPTION_IF_NULL(shape_element);
  return shape_element;
}

TypePtr InferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
  MS_EXCEPTION_IF_NULL(prim);
  auto prim_name = prim->name();
  (void)CheckAndConvertUtils::CheckInteger("input numbers", SizeToLong(input_args.size()), kEqual, 1, prim_name);
  MS_EXCEPTION_IF_NULL(input_args[0]);
  auto x_type = input_args[0]->BuildType();
  const std::set<TypePtr> valid_types = {kFloat16, kFloat32};
  (void)CheckAndConvertUtils::CheckTensorTypeValid("input_x", x_type, valid_types, prim_name);
  return x_type;
}
}  // namespace
AbstractBasePtr SoftsignInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
                              const std::vector<AbstractBasePtr> &input_args) {
  auto type = InferType(primitive, input_args);
  auto shape = InferShape(primitive, input_args);
  return abstract::MakeAbstract(shape, type);
}
REGISTER_PRIMITIVE_EVAL_IMPL(Softsign, prim::kPrimSoftsign, SoftsignInfer, nullptr, true);
}  // namespace ops
}  // namespace mindspore
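
Softsign is an elementwise activation, so the output always has exactly the input's shape and dtype; InferShape above therefore just forwards the input shape, which works unchanged whether that shape is static or dynamic. A minimal NumPy sketch of the semantics (illustrative only, not part of this PR):

import numpy as np

def softsign(x):
    # softsign(x) = x / (1 + |x|); elementwise, so the output shape
    # always equals the input shape, which is exactly why InferShape
    # can simply forward the shape of x, dynamic or not.
    return x / (1.0 + np.abs(x))

# The same logic serves any runtime shape.
for shape in [(2, 3), (5,), (4, 1, 7)]:
    assert softsign(np.random.randn(*shape).astype(np.float32)).shape == shape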

View File

@@ -0,0 +1,40 @@
/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_CORE_OPS_SOFTSIGN_H_
#define MINDSPORE_CORE_OPS_SOFTSIGN_H_
#include <map>
#include <vector>
#include <string>
#include <memory>
#include "ops/primitive_c.h"
#include "abstract/abstract_value.h"
#include "utils/check_convert_utils.h"
#include "ops/op_utils.h"

namespace mindspore {
namespace ops {
constexpr auto kNameSoftsign = "Softsign";

class Softsign : public PrimitiveC {
 public:
  Softsign() : PrimitiveC(kNameSoftsign) { InitIOName({"x"}, {"output"}); }
  ~Softsign() = default;
  MS_DECLARE_PARENT(Softsign, PrimitiveC);
  void Init() {}
};
}  // namespace ops
}  // namespace mindspore
#endif  // MINDSPORE_CORE_OPS_SOFTSIGN_H_

View File

@@ -232,6 +232,7 @@ from .tanh_grad_ds import _tanh_grad_ds_tbe
 from .softmax import _softmax_tbe
 from .softmax_ds import _softmax_ds_tbe
 from .softsign import _softsign_tbe
+from .softsign_ds import _softsign_ds_tbe
 from .softplus import _softplus_tbe
 from .softplus_ds import _softplus_ds_tbe
 from .softplus_grad import _softplus_grad_tbe

View File

@@ -0,0 +1,38 @@
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Softsign op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType

softsign_op_info = TBERegOp("Softsign") \
    .fusion_type("ELEMWISE") \
    .async_flag(False) \
    .binfile_name("softsign.so") \
    .compute_cost(10) \
    .kernel_name("softsign") \
    .partial_flag(True) \
    .dynamic_shape(True) \
    .input(0, "x", False, "required", "all") \
    .output(0, "y", False, "required", "all") \
    .op_pattern("formatAgnostic") \
    .dtype_format(DataType.F16_None, DataType.F16_None) \
    .dtype_format(DataType.F32_None, DataType.F32_None) \
    .get_op_info()


@op_info_register(softsign_op_info)
def _softsign_ds_tbe():
    """Softsign TBE register"""
    return
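
The .dynamic_shape(True) flag is what separates this registration from the static softsign.py variant: it asks the TBE backend for a shape-generic kernel that accepts tensors whose dimensions are only fixed at run time. A hedged sketch of how such an operator is exercised; Cell.set_inputs and the Tensor(shape=[None, ...]) placeholder come from MindSpore releases newer than this PR, so their availability here is an assumption:

import numpy as np
import mindspore as ms
from mindspore import nn, ops, Tensor

class SoftsignNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.softsign = ops.Softsign()

    def construct(self, x):
        return self.softsign(x)

net = SoftsignNet()
# None marks an axis whose size is only known at run time.
net.set_inputs(Tensor(shape=[None, 16], dtype=ms.float32))
# The same compiled graph now serves any batch size.
print(net(Tensor(np.random.randn(8, 16), ms.float32)).shape)   # (8, 16)
print(net(Tensor(np.random.randn(32, 16), ms.float32)).shape)  # (32, 16)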

View File

@@ -425,7 +425,7 @@ class Softplus(Primitive):
         self.init_prim_io_names(inputs=['x'], outputs=['output'])
 
 
-class Softsign(PrimitiveWithInfer):
+class Softsign(Primitive):
     r"""
     Softsign activation function.
@@ -462,13 +462,6 @@ class Softsign(PrimitiveWithInfer):
         """Initialize Softsign"""
         self.init_prim_io_names(inputs=['x'], outputs=['output'])
-
-    def infer_shape(self, input_x):
-        return input_x
-
-    def infer_dtype(self, input_x):
-        validator.check_tensor_dtype_valid('input_x', input_x, [mstype.float16, mstype.float32], self.name)
-        return input_x
 
 
 class ReLU(Primitive):
     r"""