!269 Refactor random normal op

Merge pull request !269 from peixu_ren/custom_aicpu
mindspore-ci-bot 2020-07-15 09:40:14 +08:00 committed by Gitee
commit 8cc51969f3
9 changed files with 145 additions and 50 deletions

View File

@@ -34,7 +34,7 @@ from .random_categorical import _random_categorical_aicpu
from .cast import _cast_aicpu
from .mirror_pad import _mirror_pad_aicpu
from .mirror_pad_grad import _mirror_pad_grad_aicpu
from .normal import _normal_aicpu
from .standard_normal import _standard_normal_aicpu
from .gamma import _gamma_aicpu
from .poisson import _poisson_aicpu
from .uniform_int import _uniform_int_aicpu

View File

@@ -16,18 +16,17 @@
"""RandomNormal op"""
from mindspore.ops.op_info_register import op_info_register, AiCPURegOp, DataType
normal_op_info = AiCPURegOp("Normal") \
normal_op_info = AiCPURegOp("StandardNormal") \
.fusion_type("OPAQUE") \
.input(0, "shape", "required") \
.input(1, "mean", "required") \
.input(2, "stddev", "required") \
.output(0, "output", "required") \
.attr("seed", "int") \
.dtype_format(DataType.I32_Default, DataType.F32_Default, DataType.F32_Default, DataType.F32_Default) \
.dtype_format(DataType.I32_NCHW, DataType.F32_NCHW, DataType.F32_NCHW, DataType.F32_NCHW) \
.attr("seed2", "int") \
.dtype_format(DataType.I32_Default, DataType.F32_Default) \
.dtype_format(DataType.I32_NCHW, DataType.F32_NCHW) \
.get_op_info()
@op_info_register(normal_op_info)
def _normal_aicpu():
def _standard_normal_aicpu():
"""RandomNormal AiCPU register"""
return
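
For context, the same registration pattern (a constant shape tuple in, a float32 tensor out, with seed/seed2 integer attributes) could be reused for other AiCPU random ops. A hedged sketch with a purely illustrative op name, using only the builder calls shown above:

# Illustrative only: not part of this change. Registers a hypothetical AiCPU op
# following the shape-in / float32-out, seed/seed2 pattern shown above.
from mindspore.ops.op_info_register import op_info_register, AiCPURegOp, DataType

example_op_info = AiCPURegOp("ExampleRandomOp") \
    .fusion_type("OPAQUE") \
    .input(0, "shape", "required") \
    .output(0, "output", "required") \
    .attr("seed", "int") \
    .attr("seed2", "int") \
    .dtype_format(DataType.I32_Default, DataType.F32_Default) \
    .dtype_format(DataType.I32_NCHW, DataType.F32_NCHW) \
    .get_op_info()

@op_info_register(example_op_info)
def _example_random_op_aicpu():
    """Hypothetical ExampleRandomOp AiCPU register (pattern illustration only)."""
    return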

View File

@@ -27,6 +27,7 @@ from .clip_ops import clip_by_value
from .multitype_ops.add_impl import hyper_add
from .multitype_ops.ones_like_impl import ones_like
from .multitype_ops.zeros_like_impl import zeros_like
from .random_ops import normal
__all__ = [
@@ -47,4 +48,5 @@ __all__ = [
'zeros_like',
'ones_like',
'zip_operation',
'clip_by_value']
'normal',
'clip_by_value',]

View File

@@ -0,0 +1,63 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Operations for random number generatos."""
from mindspore.ops.primitive import constexpr
from .. import operations as P
# set graph-level RNG seed
_GRAPH_SEED = 0
@constexpr
def set_seed(seed):
global _GRAPH_SEED
_GRAPH_SEED = seed
@constexpr
def get_seed():
return _GRAPH_SEED
def normal(shape, mean, stddev, seed):
"""
Generates random numbers according to the Normal (or Gaussian) random number distribution.
It is defined as:
.. math::
\text{f}(x;μ,σ) = \frac{1}{σ\sqrt{2π}}\exp(-\frac{1}{2}(\frac{x-μ}{σ})^2),
Args:
- **shape** (tuple) - The shape of the random tensor to be generated.
- **mean** (Tensor) - The mean μ distribution parameter, which specifies the location of the peak.
With float32 data type.
- **stddev** (Tensor) - The standard deviation σ distribution parameter. With float32 data type.
- **seed** (int) - Seed used as an entropy source for the random number engines that generate
pseudo-random numbers.
Returns:
Tensor. The shape is the broadcast of the input "shape" with the shapes of mean and stddev.
The dtype is float32.
Examples:
>>> shape = (4, 16)
>>> mean = Tensor(1.0, mstype.float32)
>>> stddev = Tensor(1.0, mstype.float32)
>>> output = C.normal(shape, mean, stddev, seed=5)
"""
set_seed(10)
seed1 = get_seed()
seed2 = seed
stdnormal = P.StandardNormal(seed1, seed2)
rnd = stdnormal(shape)
value = rnd * stddev + mean
return value
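
Putting the composite to work mirrors the updated tests below: mean and stddev are passed as tensors into construct, and C.normal does the sampling plus the scale-and-shift. A minimal sketch (the class name and the context/device settings are assumptions; any backend that registers StandardNormal should behave the same):

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype
from mindspore.ops import composite as C

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

class NormalSampler(nn.Cell):
    """Samples from N(mean, stddev**2) via the new C.normal composite."""
    def __init__(self, shape, seed=0):
        super(NormalSampler, self).__init__()
        self.shape = shape
        self.seed = seed

    def construct(self, mean, stddev):
        return C.normal(self.shape, mean, stddev, self.seed)

net = NormalSampler((4, 16), seed=5)
output = net(Tensor(1.0, mstype.float32), Tensor(1.0, mstype.float32))
# output is a float32 Tensor of shape (4, 16)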

View File

@@ -54,7 +54,7 @@ from .math_ops import (Abs, ACos, Asin, Asinh, AddN, AccumulateNV2, AssignAdd, A
Sin, Sqrt, Rsqrt, BesselI0e, BesselI1e,
Square, Sub, TensorAdd, Sign, Round, SquareSumAll, Atan, Atanh, Cosh, Sinh, Eps)
from .random_ops import (RandomChoiceWithMask, Normal, Gamma, Poisson, UniformInt, UniformReal,
from .random_ops import (RandomChoiceWithMask, StandardNormal, Gamma, Poisson, UniformInt, UniformReal,
RandomCategorical, Laplace)
from .nn_ops import (LSTM, SGD, Adam, SparseApplyAdam, SparseApplyLazyAdam, ApplyMomentum, BatchNorm,
BiasAdd, Conv2D,
@@ -173,7 +173,7 @@ __all__ = [
'HSigmoid',
'Tanh',
'RandomChoiceWithMask',
'Normal',
'StandardNormal',
'Gamma',
'Poisson',
'UniformInt',

View File

@@ -22,23 +22,16 @@ from ..primitive import PrimitiveWithInfer, prim_attr_register
from .._utils import get_broadcast_shape
class Normal(PrimitiveWithInfer):
class StandardNormal(PrimitiveWithInfer):
r"""
Generates random numbers according to the Normal (or Gaussian) random number distribution.
It is defined as:
.. math::
\text{f}(x;μ,σ) = \frac{1}{σ\sqrt{2π}}\exp(-\frac{1}{2}(\frac{x-μ}{σ})^2),
Generates random numbers according to the standard Normal (or Gaussian) random number distribution.
Args:
seed (int): Seed data is used as entropy source for Random number engines generating pseudo-random numbers.
Default: 0.
seed (int): Random seed. Default: 0.
seed2 (int): Random seed2. Default: 0.
Inputs:
- **shape** (tuple) - The shape of random tensor to be generated. Only constant value is allowed.
- **mean** (Tensor) - The mean μ distribution parameter, which specifies the location of the peak.
With float32 data type.
- **stddev** (Tensor) - The deviation σ distribution parameter. With float32 data type.
Outputs:
Tensor. The shape is the same as the input "shape".
@@ -46,31 +39,26 @@ class Normal(PrimitiveWithInfer):
Examples:
>>> shape = (4, 16)
>>> mean = Tensor(1.0, mstype.float32)
>>> stddev = Tensor(1.0, mstype.float32)
>>> normal = P.Normal(seed=2)
>>> output = normal(shape, mean, stddev)
>>> stdnormal = P.StandardNormal(seed=2)
>>> output = stdnormal(shape)
"""
@prim_attr_register
def __init__(self, seed=0):
"""Init Normal"""
self.init_prim_io_names(inputs=['shape', 'mean', 'stddev'], outputs=['output'])
def __init__(self, seed=0, seed2=0):
"""Init StandardNormal"""
self.init_prim_io_names(inputs=['shape'], outputs=['output'])
validator.check_value_type('seed', seed, [int], self.name)
validator.check_value_type('seed2', seed2, [int], self.name)
def __infer__(self, shape, mean, stddev):
def __infer__(self, shape):
shape_v = shape["value"]
if shape_v is None:
raise ValueError(f"For {self.name}, shape must be const.")
validator.check_value_type("shape", shape_v, [tuple], self.name)
for i, shape_i in enumerate(shape_v):
validator.check_integer("shape[%d]" % i, shape_i, 0, Rel.GT, self.name)
validator.check_tensor_type_same({"mean": mean["dtype"]}, [mstype.float32], self.name)
validator.check_tensor_type_same({"stddev": stddev["dtype"]}, [mstype.float32], self.name)
broadcast_shape = get_broadcast_shape(mean['shape'], stddev['shape'], self.name)
broadcast_shape = get_broadcast_shape(broadcast_shape, shape_v, self.name)
out = {
'shape': broadcast_shape,
'shape': shape_v,
'dtype': mstype.float32,
'value': None}
return out
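
The refactor narrows the primitive's contract: it consumes only the constant shape tuple, infers exactly that shape (no more broadcasting against mean and stddev), and always yields float32 samples from N(0, 1); scaling and shifting are left to callers such as C.normal. A hedged sketch reproducing the old Normal behaviour on top of the new primitive (the class name and context settings are illustrative):

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

class ShiftedNormal(nn.Cell):
    """Reproduces the removed Normal(shape, mean, stddev) on top of StandardNormal."""
    def __init__(self, shape, seed=0, seed2=0):
        super(ShiftedNormal, self).__init__()
        self.shape = shape
        self.stdnormal = P.StandardNormal(seed, seed2)

    def construct(self, mean, stddev):
        rnd = self.stdnormal(self.shape)   # N(0, 1) samples, exactly self.shape, float32
        return rnd * stddev + mean         # scale and shift, as C.normal does

net = ShiftedNormal((4, 16), seed=2)
output = net(Tensor(1.0, mstype.float32), Tensor(1.0, mstype.float32))
# output.shape == (4, 16)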

View File

@@ -12,13 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
from mindspore.common import dtype as mstype
from mindspore.ops import composite as C
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -26,11 +28,11 @@ context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self, shape, seed=0):
super(Net, self).__init__()
self.normal = P.Normal(seed=seed)
self.shape = shape
self.seed = seed
def construct(self, mean, stddev):
return self.normal(self.shape, mean, stddev)
return C.normal(self.shape, mean, stddev, self.seed)
def test_net_1D():
@@ -51,7 +53,7 @@ def test_net_ND():
mean = np.array([[[1], [2]], [[3], [4]], [[5], [6]]]).astype(np.float32)
stddev = np.array([1.0]).astype(np.float32)
net = Net(shape, seed)
tmean, tstddev = Tensor(mean), Tensor(stddev)
tmean, tstddev = Tensor(mean, mstype.float32), Tensor(stddev, mstype.float32)
output = net(tmean, tstddev)
print(output.asnumpy())
assert output.shape == (3, 2, 2)

View File

@@ -0,0 +1,47 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self, shape, seed=0, seed2=0):
super(Net, self).__init__()
self.shape = shape
self.seed = seed
self.seed2 = seed2
self.stdnormal = P.StandardNormal(seed, seed2)
def construct(self):
return self.stdnormal(self.shape)
def test_net():
seed = 10
seed2 = 10
shape = (3, 2, 4)
net = Net(shape, seed, seed2)
output = net()
print(output.asnumpy())
assert output.shape == (3, 2, 4)

View File

@@ -530,15 +530,13 @@ class InplaceSubNet(nn.Cell):
class NormalNet(nn.Cell):
def __init__(self, shape=None, mean=0.0, stddev=1.0, seed=0):
def __init__(self, shape=None, seed=0):
super(NormalNet, self).__init__()
self.normal = P.Normal(seed=seed)
self.shape = shape
self.mean = Tensor(mean, mstype.float32)
self.stddev = Tensor(stddev, mstype.float32)
self.seed = seed
def construct(self):
out = self.normal(self.shape, self.mean, self.stddev)
def construct(self, mean, stddev):
out = C.normal(self.shape, mean, stddev, self.seed)
return out
@@ -860,8 +858,8 @@ test_case_math_ops = [
'desc_inputs': [[64, 128, 1024]],
'skip': ['backward']}),
('Normal', {
'block': NormalNet((3, 2, 4), 0.0, 1.0, 0),
'desc_inputs': [],
'block': NormalNet((3, 2, 4), 0),
'desc_inputs': [Tensor(0.0, mstype.float32), Tensor(1.0, mstype.float32)],
'skip': ['backward']}),
('Laplace', {
'block': LaplaceNet((3, 2, 4), 0),
@@ -1171,10 +1169,6 @@ test_case_math_ops = [
'desc_inputs': [Tensor([-1.0, 0.0, 1.5, 2.0, 5.0, 15], mstype.float16), Tensor([0.0, 5.0], mstype.float16)],
'desc_bprop': [],
'skip': ['backward']}),
('Normal', {
'block': NormalNet((3, 2, 4), 0.0, 1.0, 0),
'desc_inputs': [],
'skip': ['backward']}),
('Mod', {
'block': P.Mod(),
'desc_inputs': [[3, 4, 5], [2, 3, 4, 5]],