From 86a88d48ad694dd6ee0334fb3fc639b1798af181 Mon Sep 17 00:00:00 2001
From: shaojunsong
Date: Fri, 17 Feb 2023 15:53:19 +0800
Subject: [PATCH] Fix I5YRQ8

---
 mindspore/python/mindspore/nn/layer/activation.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/mindspore/python/mindspore/nn/layer/activation.py b/mindspore/python/mindspore/nn/layer/activation.py
index 79119ac1d81..9ba34dc0673 100644
--- a/mindspore/python/mindspore/nn/layer/activation.py
+++ b/mindspore/python/mindspore/nn/layer/activation.py
@@ -556,16 +556,17 @@ class RReLU(Cell):
             raise ValueError(f"For {self.cls_name}, the value of 'upper' must be greater than 'lower', "
                              f"but got upper: {upper}, lower: {lower}. ")
-        self.lower = lower
-        self.upper = upper
+        self.lower = Tensor(lower)
+        self.upper = Tensor(upper)
         self.sign = P.Sign()
 
     def construct(self, x):
-        size = x.shape
+        _size = x.shape
+        _dtype = x.dtype
         sign_matrix = self.sign(x)
         negative_filter = sign_matrix.clip(None, 0)
         positive_filter = sign_matrix.clip(0, None)
-        mask = P.Cast()(Tensor(np.random.uniform(self.lower, self.upper, size=size)), P.DType()(x))
+        mask = ops.uniform(_size, self.lower.astype(_dtype), self.upper.astype((_dtype)), dtype=_dtype)
         negative_mask = negative_filter * mask * -1
         total_mask = negative_mask + positive_filter
         out = total_mask * x
         return out
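
For context, a minimal sketch (not part of the patch) of what the updated construct() now does: the random slope mask is sampled on-device with ops.uniform in the input's dtype, instead of host-side np.random.uniform followed by a cast. The input values, shape, and dtype below are illustrative assumptions only.

    import numpy as np
    import mindspore as ms
    from mindspore import Tensor, ops

    # Illustrative input; any float tensor works.
    x = Tensor(np.array([[-1.0, 2.0], [3.0, -4.0]]), ms.float32)

    # Bounds stored as Tensors (as in the patch), cast to the input's dtype.
    lower = Tensor(1 / 8).astype(x.dtype)
    upper = Tensor(1 / 3).astype(x.dtype)

    # One random slope per element, drawn uniformly from [lower, upper).
    mask = ops.uniform(x.shape, lower, upper, dtype=x.dtype)
    print(mask.shape)  # (2, 2)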