forked from mindspore-Ecosystem/mindspore
!46810 fix prelu when weight is negative
Merge pull request !46810 from 冯一航/fix_prelu
commit 5369b75de7
@@ -1124,15 +1124,9 @@ class PReLU(Cell):
                             f"but got {type(w).__name__}.")
         self.w = Parameter(w, name='a')
         self.prelu = P.PReLU()
-        self.relu = P.ReLU()
-        self.assign = P.Assign()
 
     def construct(self, x):
-        u = self.relu(self.w)
-        v = self.prelu(x, F.cast(u, x.dtype))
-        if self.training:
-            self.assign(self.w, u)
-        return v
+        return self.prelu(x, F.cast(self.w, x.dtype))
 
 
 class HSwish(Cell):
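The deleted lines are the substance of the fix: the old construct routed self.w through ReLU before calling prelu, so a negative slope was silently clamped to 0 (PReLU degenerated into plain ReLU), and in training mode the clamped value was written back over self.w via Assign. A minimal numpy sketch of the two forward passes (illustrative only; prelu, old_construct, and new_construct are hypothetical names, not MindSpore API):

import numpy as np

def prelu(x, w):
    # PReLU definition: x where x > 0, otherwise w * x
    return np.where(x > 0, x, w * x)

def old_construct(x, w):
    u = np.maximum(w, 0.0)   # relu(w): a negative slope is clamped to 0...
    return prelu(x, u)       # ...so PReLU degenerates into plain ReLU

def new_construct(x, w):
    return prelu(x, w)       # negative slopes pass through unchanged

x = np.array([-1.0, 2.0])
print(old_construct(x, -0.25))  # [-0.  2.]   slope clamped to 0
print(new_construct(x, -0.25))  # [0.25 2.]   -1.0 * -0.25, as expected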
@@ -1525,6 +1519,7 @@ class GLU(Cell):
         x2 = self.sigmoid(x2)
         return x1 * x2
 
 
 _activation = {
     'softmin': Softmin,
     'softmax': Softmax,
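For context, _activation (only shifted in position by the hunk above) is the registry mapping lowercase activation names to their Cell classes, and mindspore.nn.get_activation resolves names through it. A small usage sketch, relying only on the 'softmax' entry visible above:

import mindspore.nn as nn

# get_activation looks the name up in the _activation registry and
# returns an instance of the matching Cell subclass.
act = nn.get_activation('softmax')
print(type(act).__name__)  # Softmax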
@@ -0,0 +1,66 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import numpy as np
+import pytest
+
+import mindspore as ms
+import mindspore.nn as nn
+
+
+class Net(nn.Cell):
+    def __init__(self):
+        super(Net, self).__init__()
+        self.pool = nn.PReLU(channel=2, w=-0.25)
+
+    def construct(self, x):
+        out = self.pool(x)
+        return out
+
+
+@pytest.mark.level0
+@pytest.mark.platform_x86_cpu
+@pytest.mark.platform_arm_cpu
+@pytest.mark.platform_x86_gpu_training
+@pytest.mark.platform_arm_ascend_training
+@pytest.mark.platform_x86_ascend_training
+@pytest.mark.env_onecard
+@pytest.mark.parametrize('mode', [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
+def test_prelu_normal(mode):
+    """
+    Feature: PReLU
+    Description: Verify the result of PReLU
+    Expectation: success
+    """
+    ms.set_context(mode=mode)
+    x = ms.Tensor([[[0.9192, -0.1487],
+                    [-0.3999, -0.6840]],
+
+                   [[0.4745, -0.6271],
+                    [-0.6547, -0.5856]],
+
+                   [[-0.2572, -0.8412],
+                    [0.1918, -0.6117]]])
+    net = Net()
+    out = net(x)
+    expect_out = np.array([[[0.9192, 0.037175],
+                            [0.099975, 0.171]],
+
+                           [[0.4745, 0.156775],
+                            [0.163675, 0.1464]],
+
+                           [[0.0643, 0.2103],
+                            [0.1918, 0.152925]]])
+    assert np.allclose(out.asnumpy().astype(np.float16), expect_out.astype(np.float16))
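The expected values follow directly from the PReLU definition f(x) = x for x > 0 and f(x) = w * x otherwise, here with w = -0.25. An independent numpy cross-check (illustrative, not part of the commit):

import numpy as np

x = np.array([[[0.9192, -0.1487],
               [-0.3999, -0.6840]],

              [[0.4745, -0.6271],
               [-0.6547, -0.5856]],

              [[-0.2572, -0.8412],
               [0.1918, -0.6117]]])

# Reference PReLU with a shared negative slope of -0.25.
ref = np.where(x > 0, x, -0.25 * x)
print(ref)  # e.g. -0.6840 * -0.25 = 0.171, matching expect_out above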