!16771 convert Python to C++ in the ReLUGradV2 operator
From: @shen_jingxing Reviewed-by: @ginfung,@zh_qh Signed-off-by: @zh_qh
Commit: edfab4b8a0
@@ -60,6 +60,7 @@ constexpr auto kReLU = "ReLU";
 constexpr auto kReLU6 = "ReLU6";
 constexpr auto kReLUV2 = "ReLUV2";
 constexpr auto kReLUGrad = "ReluGrad";
+constexpr auto kReLUGradV2 = "ReluGradV2";
 constexpr auto kGeLUGrad = "GeLUGrad";
 constexpr auto kFastGeLU = "FastGeLU";
 constexpr auto kFastGeLUGrad = "FastGeLUGrad";
@@ -0,0 +1,64 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "ops/grad/relu_grad_v2.h"
+#include <string>
+#include <algorithm>
+#include <map>
+#include <set>
+#include <vector>
+
+#include "ops/op_utils.h"
+#include "utils/check_convert_utils.h"
+#include "abstract/primitive_infer_map.h"
+
+namespace mindspore {
+namespace ops {
+namespace {
+abstract::ShapePtr InferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
+  MS_EXCEPTION_IF_NULL(primitive);
+  auto prim_name = primitive->name();
+  CheckAndConvertUtils::CheckInteger("input numbers", input_args.size(), kEqual, 2, prim_name);
+  for (const auto &item : input_args) {
+    MS_EXCEPTION_IF_NULL(item);
+  }
+  auto x = input_args[0]->BuildShape();
+  MS_EXCEPTION_IF_NULL(x);
+  auto shape_element = x->cast<abstract::ShapePtr>();
+  MS_EXCEPTION_IF_NULL(shape_element);
+  return shape_element;
+}
+TypePtr InferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
+  MS_EXCEPTION_IF_NULL(prim);
+  auto prim_name = prim->name();
+  CheckAndConvertUtils::CheckInteger("ReLUGradV2 infer", input_args.size(), kEqual, 2, prim_name);
+  MS_EXCEPTION_IF_NULL(input_args[0]);
+  auto x_type_map = input_args[0]->BuildType();
+  MS_EXCEPTION_IF_NULL(x_type_map);
+  auto x_type = x_type_map->cast<TensorTypePtr>();
+  MS_EXCEPTION_IF_NULL(x_type);
+  std::set<TypePtr> valid_x_type = {kTensorType};
+  return CheckAndConvertUtils::CheckTensorTypeValid("input_x", x_type, valid_x_type, prim_name);
+}
+}  // namespace
+AbstractBasePtr ReLUGradV2Infer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
+                                const std::vector<AbstractBasePtr> &input_args) {
+  return std::make_shared<abstract::AbstractTensor>(InferType(primitive, input_args),
+                                                    InferShape(primitive, input_args));
+}
+REGISTER_PRIMITIVE_EVAL_IMPL(ReLUGradV2, prim::kPrimReluGradV2, ReLUGradV2Infer, nullptr, true);
+
+}  // namespace ops
+}  // namespace mindspore
@@ -0,0 +1,41 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MINDSPORE_CORE_OPS_RELUGRADV2_H_
+#define MINDSPORE_CORE_OPS_RELUGRADV2_H_
+#include <map>
+#include <vector>
+#include <string>
+#include <memory>
+#include "ops/primitive_c.h"
+#include "ops/op_utils.h"
+#include "abstract/abstract_value.h"
+#include "utils/check_convert_utils.h"
+
+namespace mindspore {
+namespace ops {
+constexpr auto kNameReLUGradV2 = prim::kReLUGradV2;
+class ReLUGradV2 : public PrimitiveC {
+ public:
+  ReLUGradV2() : PrimitiveC(prim::kPrimReluGradV2->name()) { InitIOName({"x"}, {"output"}); }
+  ~ReLUGradV2() = default;
+  MS_DECLARE_PARENT(ReLUGradV2, PrimitiveC);
+  void Init() {}
+};
+using PrimReLUGradV2Ptr = std::shared_ptr<ReLUGradV2>;
+}  // namespace ops
+}  // namespace mindspore
+
+#endif  // MINDSPORE_CORE_OPS_RELUGRADV2_H_
@@ -1606,7 +1606,7 @@ class ReLU6Grad(PrimitiveWithInfer):
         return x_dtype
 
 
-class ReluGradV2(PrimitiveWithInfer):
+class ReluGradV2(Primitive):
     """Performs grad of ReLUV2 operation."""
 
     @prim_attr_register
@@ -1616,14 +1616,6 @@ class ReluGradV2(PrimitiveWithInfer):
     def __call__(self, gradients, mask):
         raise NotImplementedError
 
-    def infer_shape(self, gradients_shape, mask_shape):
-        return gradients_shape
-
-    def infer_dtype(self, gradients_dtype, mask_dtype):
-        validator.check_tensor_dtype_valid('gradients', gradients_dtype, mstype.number_type, self.name)
-        validator.check_tensor_dtype_valid('mask', mask_dtype, (mstype.uint8,), self.name)
-        return gradients_dtype
-
 
 class EluGrad(PrimitiveWithInfer):
     """Performs grad of Elu operation."""
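
Note (not part of this change): ReluGradV2 propagates the incoming gradient only where the mask produced by ReLUV2 is set, which is why the frontend infer can simply reuse the gradient's shape and dtype. A rough NumPy sketch of the element-wise semantics follows; the helper name relu_grad_v2_reference is purely illustrative, and it assumes a simplified mask with the same shape as the gradient (the real ReLUV2 mask uses a packed uint8 layout).

    import numpy as np

    def relu_grad_v2_reference(gradients, mask):
        # Pass the incoming gradient through where the forward ReLU was
        # active (mask != 0); zero it elsewhere.
        return gradients * (mask != 0).astype(gradients.dtype)

    gradients = np.array([[0.5, -1.0], [2.0, 3.0]], dtype=np.float32)
    mask = np.array([[1, 0], [1, 1]], dtype=np.uint8)
    print(relu_grad_v2_reference(gradients, mask))
    # [[0.5 0. ]
    #  [2.  3. ]]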