forked from mindspore-Ecosystem/mindspore

Add FloorMod, Acosh in ME
parent fb92b5b16e
commit 65cd9d48fd
@@ -172,6 +172,8 @@ const char kNameBinaryCrossEntropy[] = "BinaryCrossEntropy";
const char kNameBinaryCrossEntropyGrad[] = "BinaryCrossEntropyGrad";
const char kNameSparseApplyAdagrad[] = "SparseApplyAdagrad";
const char kNameSparseApplyFtrlD[] = "SparseApplyFtrlD";
const char kNameAcosh[] = "Acosh";
const char kNameFloorMod[] = "FloorMod";
const char kNameSpaceToDepth[] = "SpaceToDepth";
const char kNameDepthToSpace[] = "DepthToSpace";
const char kNameSign[] = "Sign";
@@ -361,7 +363,9 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_ma
    {string(kNameBinaryCrossEntropy), ADPT_DESC(BinaryCrossEntropy)},
    {string(kNameBinaryCrossEntropyGrad), ADPT_DESC(BinaryCrossEntropyGrad)},
    {string(kNameSparseApplyAdagrad), ADPT_DESC(SparseApplyAdagradD)},
    {string(kNameSparseApplyFtrlD), ADPT_DESC(SparseApplyFtrlD)},
    {string(kNameAcosh), ADPT_DESC(Acosh)},
    {string(kNameFloorMod), ADPT_DESC(FloorMod)},
    {string(kNameSpaceToDepth), ADPT_DESC(SpaceToDepth)},
    {string(kNameDepthToSpace), ADPT_DESC(DepthToSpace)},
    {string(kNameSign), ADPT_DESC(Sign)},
@@ -356,6 +356,11 @@ INPUT_MAP(AcosGrad) = {{1, INPUT_DESC(y)}, {2, INPUT_DESC(dy)}};
ATTR_MAP(AcosGrad) = EMPTY_ATTR_MAP;
OUTPUT_MAP(AcosGrad) = {{0, OUTPUT_DESC(z)}};

// Acosh
INPUT_MAP(Acosh) = {{1, INPUT_DESC(x)}};
ATTR_MAP(Acosh) = EMPTY_ATTR_MAP;
OUTPUT_MAP(Acosh) = {{0, OUTPUT_DESC(y)}};

// Floor
INPUT_MAP(Floor) = {{1, INPUT_DESC(x)}};
ATTR_MAP(Floor) = EMPTY_ATTR_MAP;
@@ -366,6 +371,11 @@ INPUT_MAP(FloorDiv) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
ATTR_MAP(FloorDiv) = EMPTY_ATTR_MAP;
OUTPUT_MAP(FloorDiv) = {{0, OUTPUT_DESC(y)}};

// FloorMod
INPUT_MAP(FloorMod) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
ATTR_MAP(FloorMod) = EMPTY_ATTR_MAP;
OUTPUT_MAP(FloorMod) = {{0, OUTPUT_DESC(y)}};

// Sin
INPUT_MAP(Sin) = {{1, INPUT_DESC(x)}};
ATTR_MAP(Sin) = EMPTY_ATTR_MAP;
@@ -324,11 +324,15 @@ DECLARE_OP_USE_OUTPUT(Acos)

DECLARE_OP_ADAPTER(AcosGrad)
DECLARE_OP_USE_OUTPUT(AcosGrad)
DECLARE_OP_ADAPTER(Acosh)
DECLARE_OP_USE_OUTPUT(Acosh)

DECLARE_OP_ADAPTER(Floor)
DECLARE_OP_USE_OUTPUT(Floor)
DECLARE_OP_ADAPTER(FloorDiv)
DECLARE_OP_USE_OUTPUT(FloorDiv)
DECLARE_OP_ADAPTER(FloorMod)
DECLARE_OP_USE_OUTPUT(FloorMod)
DECLARE_OP_ADAPTER(Sin)
DECLARE_OP_USE_OUTPUT(Sin)
DECLARE_OP_ADAPTER(Exp)
@@ -452,7 +456,6 @@ DECLARE_OP_USE_INPUT_ATTR(ApplyRMSPropD)
DECLARE_OP_USE_OUTPUT(ApplyRMSPropD)
DECLARE_OP_ADAPTER(ApplyCenteredRMSProp)
DECLARE_OP_USE_OUTPUT(ApplyCenteredRMSProp)

#ifdef ENABLE_GE
DECLARE_OP_ADAPTER(Print)
DECLARE_OP_USE_DYN_INPUT(Print)
@@ -251,6 +251,20 @@ def get_bprop_floordiv(self):
    return bprop


@bprop_getters.register(P.FloorMod)
def get_bprop_floormod(self):
    """Grad definition for `FloorMod` operation."""
    div_op = P.FloorMod()
    neg = P.Neg()
    mul_op = P.Mul()

    def bprop(x, y, out, dout):
        bc_x = div_op(dout, y)
        bc_y = neg(mul_op(bc_x, out))
        return binop_grad_common(x, y, bc_x, bc_y)
    return bprop
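For reference, under the usual floor-mod convention out = x - floor(x / y) * y, with floor(x / y) treated as locally constant, the chain rule gives d(out)/dx = 1 and d(out)/dy = -floor(x / y). This is background on the derivation only (my assumption, not something this commit states); a minimal NumPy finite-difference check, evaluated away from the discontinuities:

import numpy as np

# Illustrative standalone check; MindSpore is not needed here.
x, y, eps = 7.3, 2.1, 1e-5

def floor_mod(a, b):
    # np.mod follows the floor-division convention: a - floor(a / b) * b.
    return np.mod(a, b)

# Finite-difference estimates of d(out)/dx and d(out)/dy.
num_dx = (floor_mod(x + eps, y) - floor_mod(x - eps, y)) / (2 * eps)
num_dy = (floor_mod(x, y + eps) - floor_mod(x, y - eps)) / (2 * eps)

# Analytic values under the locally-constant-floor assumption.
assert abs(num_dx - 1.0) < 1e-4
assert abs(num_dy - (-np.floor(x / y))) < 1e-4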


@bprop_getters.register(P.Square)
def get_bprop_square(self):
    """Grad definition for `Square` operation."""
@@ -690,6 +704,17 @@ def get_bprop_acos(self):
    return bprop


@bprop_getters.register(P.Acosh)
def get_bprop_acosh(self):
    """Grad definition for `Acosh` operation."""
    input_grad = G.AcoshGrad()

    def bprop(x, out, dout):
        dx = input_grad(x, dout)
        return (dx,)
    return bprop
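As background (a sketch of the underlying math, not taken from this change): for x > 1, d/dx acosh(x) = 1 / sqrt(x^2 - 1), which is the quantity an Acosh gradient ultimately has to produce, whatever the exact calling convention of G.AcoshGrad is. A small NumPy check:

import numpy as np

x = np.array([1.5, 2.0, 5.0])
eps = 1e-6

analytic = 1.0 / np.sqrt(x * x - 1.0)  # closed-form derivative of acosh
numeric = (np.arccosh(x + eps) - np.arccosh(x - eps)) / (2 * eps)  # central difference

assert np.allclose(analytic, numeric, rtol=1e-4)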


@bprop_getters.register(P.Abs)
def get_bprop_abs(self):
    """Grad definition for `Abs` operation."""
@@ -39,7 +39,7 @@ from .control_ops import ControlDepend, GeSwitch, Merge
from .inner_ops import ScalarCast
from .math_ops import (Abs, ACos, AddN, AssignAdd, AssignSub, Atan2, BatchMatMul,
                       ReduceMax, ReduceMin, ReduceMean, ReduceSum, ReduceAll, ReduceProd, CumProd,
                       Cos, Div, Equal, EqualCount, Exp, Floor, FloorDiv,
                       Cos, Div, Equal, EqualCount, Exp, Floor, FloorDiv, FloorMod, Acosh,
                       Greater, GreaterEqual, Less, LessEqual, Log, LogicalAnd,
                       LogicalNot, LogicalOr, MatMul, Maximum,
                       Minimum, Mul, Neg, NMSWithMask, NotEqual,
@@ -205,6 +205,8 @@ __all__ = [
    'Log',
    'SigmoidCrossEntropyWithLogits',
    'FloorDiv',
    'FloorMod',
    'Acosh',
    "PReLU",
    "Cos",
    "ACos",
@@ -1183,6 +1183,62 @@ class Floor(PrimitiveWithInfer):
        return x_dtype


class FloorMod(_MathBinaryOp):
    """
    Compute the element-wise remainder of division.

    The inputs must be two tensors, or one tensor and one scalar.
    When the inputs are two tensors, their shapes can be broadcast and their data types must be the same.
    When the inputs are one tensor and one scalar, the scalar can only be a constant, not a Parameter,
    and its type must be the same as the data type of the tensor.

    Inputs:
        - **input_x** (Union[Tensor, Number]) - The first input is a tensor whose data type is number, or a number.
        - **input_y** (Union[Tensor, Number]) - The second input is a tensor whose data type is the same as
          `input_x`, or a number.

    Outputs:
        Tensor, the shape is the same as the shape after broadcasting, and the data type is the same as `input_x`.

    Examples:
        >>> input_x = Tensor(np.array([2, 4, -1]), mindspore.int32)
        >>> input_y = Tensor(np.array([3, 3, 3]), mindspore.int32)
        >>> floor_mod = FloorMod()
        >>> floor_mod(input_x, input_y)
        [2, 1, 2]
    """


class Acosh(PrimitiveWithInfer):
    """
    Compute the inverse hyperbolic cosine of the input element-wise.

    Inputs:
        - **input_x** (Tensor) - The shape of tensor is :math:`(x_1, x_2, ..., x_R)`.

    Outputs:
        Tensor, has the same shape as `input_x`.

    Examples:
        >>> acosh = Acosh()
        >>> X = Tensor(np.array([1.0, 1.5, 3.0, 100.0]), ms.float32)
        >>> output = acosh(X)
    """

    @prim_attr_register
    def __init__(self):
        """init Acosh"""

    def infer_shape(self, x):
        return x

    def infer_dtype(self, x):
        validator.check_subclass("x_dtype", x, mstype.tensor)
        validator.check_typename('x_dtype', x, mstype.number_type)
        return x
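For a quick sense of the numbers in the Acosh example above, the same input through NumPy (a sketch only; the primitive itself runs on the MindSpore backend):

import numpy as np

x = np.array([1.0, 1.5, 3.0, 100.0], dtype=np.float32)
print(np.arccosh(x))  # approximately [0. 0.9624237 1.7627472 5.298292]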


class _LogicBinaryOp(_BinaryOp):
    """
    Define logic binary operators.
@@ -219,6 +219,10 @@ test_case_math_ops = [
        'block': P.ACos(),
        'desc_inputs': [[2, 3]],
        'desc_bprop': [[2, 3]]}),
    ('Acosh', {
        'block': P.Acosh(),
        'desc_inputs': [Tensor(np.random.rand(4).astype(np.float16))],
        'skip': ['backward']}),
    ('Sin', {
        'block': P.Sin(),
        'desc_inputs': [[2, 3]],
@@ -301,6 +305,11 @@ test_case_math_ops = [
        'desc_inputs': [Tensor(np.random.rand(4).astype(np.float16)),
                        Tensor(np.random.rand(4).astype(np.float16))],
        'skip': ['backward']}),
    ('FloorMod', {
        'block': P.FloorMod(),
        'desc_inputs': [Tensor(np.random.rand(4).astype(np.float16)),
                        Tensor(np.random.rand(4).astype(np.float16))],
        'skip': ['backward']}),
    ('identity', {
        'block': ops.functional.identity,
        'desc_inputs': [[2, 2]],