!69147 revert MeanExt

Merge pull request !69147 from wang_ziqi/br_revert_MeanExt/master
i-robot 2024-05-08 06:49:37 +00:00 committed by Gitee
commit b8168e0094
9 changed files with 185 additions and 15 deletions

View File

@@ -1,6 +1,6 @@
mindspore.Tensor.mean
=====================
.. py:method:: mindspore.Tensor.mean(axis=None, keep_dims=False, dtype=None)
.. py:method:: mindspore.Tensor.mean(axis=None, keep_dims=False)
For details, please refer to :func:`mindspore.ops.mean`.
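
After this revert, `Tensor.mean` no longer takes a `dtype` argument. A minimal usage sketch of the reverted signature (the float32 input tensor is an assumption made here for illustration):

>>> import numpy as np
>>> from mindspore import Tensor
>>> x = Tensor(np.ones((2, 3)).astype(np.float32))
>>> print(x.mean())
1.0
>>> print(x.mean(axis=1, keep_dims=True).shape)
(2, 1)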

View File

@@ -1,7 +1,7 @@
mindspore.ops.mean
==================
.. py:function:: mindspore.ops.mean(input, axis=None, keep_dims=False, dtype=None)
.. py:function:: mindspore.ops.mean(input, axis=None, keep_dims=False)
By default, removes all dimensions of the input and returns the mean of all elements in `input`. It can also reduce only the specified dimensions in `axis` to size 1. `keep_dims` controls whether the output and the input have the same number of dimensions.
@@ -12,7 +12,6 @@ mindspore.ops.mean
- **input** (Tensor[Number]) - The input Tensor, with a numeric data type. The shape is :math:`(N, *)`, where :math:`*` means any number of additional dimensions.
- **axis** (Union[int, tuple(int), list(int), Tensor]) - The dimensions to reduce. Default: ``None``, which reduces all dimensions. Only constant values are allowed. Assuming the rank of `input` is r, the value range is [-r, r).
- **keep_dims** (bool) - If ``True``, keep the reduced dimensions with size 1; otherwise remove them. Default: ``False``.
- **dtype** (:class:`mindspore.dtype`) - The desired data type of the output Tensor. Default: ``None``.
Returns:
    Tensor.
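
A minimal sketch of calling `mindspore.ops.mean` with the reverted signature (no `dtype` keyword); the small float32 input is assumed for illustration:

>>> import numpy as np
>>> from mindspore import Tensor, ops
>>> x = Tensor(np.ones((2, 3)).astype(np.float32))
>>> print(ops.mean(x))
1.0
>>> print(ops.mean(x, axis=0, keep_dims=True).shape)
(1, 3)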

View File

@@ -23,4 +23,4 @@ mean_ext:
CPU: MeanExtCPU
GPU: MeanExtGPU
function:
name: mean
name: mean_ext
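
For context: changing the generated function name back to `mean_ext` means the auto-generated operator is exposed under its own name instead of replacing `mean`. A hedged sketch of how the generated op is then consumed at the Python layer, mirroring the import hunks later in this MR (treat the exact module paths as assumptions taken from those hunks):

# Illustration only; these imports correspond to lines added elsewhere in this MR.
from mindspore.ops.auto_generate import mean_ext_op        # generated from the yaml entry above
from mindspore.ops.function.math_func import mean_ext      # hand-written wrapper re-added by this MR
from mindspore.ops.functional import mean_ext as mean      # alias used by the interface module below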

View File

@@ -70,7 +70,7 @@ itemsize_map = {mstype.bool_: 1, mstype.int8: 1, mstype.uint8: 1,
nan_tensor = Tensor(float('nan'), dtype=mstype.float32)
def mean(x, axis=None, keep_dims=False, dtype=None):
def mean(x, axis=None, keep_dims=False):
"""
Reduces a dimension of a tensor by averaging all elements in the dimension.
@@ -78,7 +78,6 @@ def mean(x, axis=None, keep_dims=False, dtype=None):
axis (Union[None, int, tuple(int), list(int)]): Dimensions of reduction,
when axis is None or empty tuple, reduce all dimensions. Default: ().
keep_dims (bool): Whether to keep the reduced dimensions. Default: False.
dtype (:class:`mindspore.dtype`): The desired data type of returned Tensor. Default: ``None``.
Returns:
Tensor, has the same data type as input tensor.
@@ -94,7 +93,7 @@ def mean(x, axis=None, keep_dims=False, dtype=None):
>>> print(output)
2.0
"""
return F.mean(x, axis, keep_dims, dtype)
return F.mean(x, axis, keep_dims)
def ndimension(x):

View File

@@ -1848,11 +1848,11 @@ class Tensor(Tensor_, metaclass=_TensorMeta):
"""
return tensor_operator_registry.get('log2')(self)
def mean(self, axis=None, keep_dims=False, dtype=None):
def mean(self, axis=None, keep_dims=False):
"""
For details, please refer to :func:`mindspore.ops.mean`.
"""
return tensor_operator_registry.get('mean')(self, axis, keep_dims, dtype)
return tensor_operator_registry.get('mean')(self, axis, keep_dims)
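
`Tensor.mean` does not implement anything itself; it forwards its (now dtype-free) arguments to whatever callable is registered under 'mean'. A minimal, self-contained sketch of that dispatch pattern, using a stand-in dictionary rather than MindSpore's real `tensor_operator_registry`:

# Stand-in registry for illustration only; the real tensor_operator_registry is
# populated elsewhere in MindSpore with the functional implementations.
tensor_operator_registry = {}

def _mean_impl(x, axis, keep_dims):
    # Placeholder for F.mean(x, axis, keep_dims) after the revert (no dtype parameter).
    return sum(x) / len(x) if axis is None else x

tensor_operator_registry['mean'] = _mean_impl

# The Tensor method then only has to look up and call the registered implementation:
print(tensor_operator_registry.get('mean')([2.0, 4.0], None, False))  # 3.0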
def amin(self, axis=None, keepdims=False, *, initial=None, where=None):
"""

View File

@@ -65,13 +65,13 @@ from mindspore.ops.functional import sum
# 19
from mindspore.ops.functional import log
# 20
from mindspore.ops import prod
from mindspore.ops.functional import prod
# 21
from mindspore.ops.functional import mul
# 22
# 23
from mindspore.ops.functional import mean_ext as mean
# 24
# 25
@@ -284,7 +284,7 @@ __all__ = [
# 22
# 23
'mean',
# 24
# 25
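
Because `mean` here is an alias of `mean_ext`, this module's `mean` presumably still accepts the `dtype` keyword that `ops.mean` loses in this revert; a hedged sketch using the same import as the hunk above:

>>> import mindspore
>>> import numpy as np
>>> from mindspore import Tensor
>>> from mindspore.ops.functional import mean_ext as mean
>>> x = Tensor(np.ones((2, 3)).astype(np.float32))
>>> print(mean(x, dtype=mindspore.float32))
1.0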

View File

@@ -374,6 +374,7 @@ from .math_func import (
amin,
amax,
mean,
mean_ext,
prod,
all,
any,

View File

@@ -43,8 +43,8 @@ from mindspore.ops.auto_generate import (minimum, maximum, mul, sin, sinc, sinh,
matrix_exp, sqrt, rsqrt, square, trace, nextafter, abs, acos, acosh, angle,
asin, asinh, atan, atan2, atanh, ceil, equal, erf, erfc, erfinv, exp, expm1,
floor, floor_divide, floor_mod, gcd, greater, greater_equal, less, less_equal,
log, log1p, neg, not_equal, pow, round, isfinite, argmax, mean, sum_ext_op,
prod_ext_op, all)
log, log1p, neg, not_equal, pow, round, isfinite, argmax, mean_ext_op,
sum_ext_op, prod_ext_op, all)
from mindspore.ops.auto_generate import tanh
from mindspore.nn import layer
from mindspore._checkparam import check_is_number
@@ -6644,6 +6644,175 @@ def amax(input, axis=None, keepdims=False, *, initial=None, where=None):
return _get_cache_prim(P.ReduceMax)(keepdims)(input, axis)
def mean(x, axis=None, keep_dims=False):
r"""
Reduces all dimensions of a tensor by averaging all elements, by default.
It can also reduce a dimension of `x` along the specified `axis`. `keep_dims`
determines whether the dimensions of the output and input are the same.
Args:
x (Tensor[Number]): The input tensor. The dtype of the tensor to be reduced is number.
:math:`(N, *)` where :math:`*` means any number of additional dimensions.
axis (Union[int, tuple(int), list(int)]): The dimensions to reduce. Default: ``None`` , reduce all dimensions.
Only constant value is allowed. Assume the rank of `x` is r, and the value range is [-r,r).
keep_dims (bool): If true, keep these reduced dimensions and the length is 1.
If false, don't keep these dimensions. Default: ``False`` .
Returns:
Tensor, has the same data type as input tensor.
- If `axis` is None, and `keep_dims` is False,
the output is a 0-D tensor representing the mean of all elements in the input tensor.
- If `axis` is int, set as 1, and `keep_dims` is False,
the shape of output is :math:`(x_0, x_2, ..., x_R)`.
- If `axis` is tuple(int), set as (1, 2), and `keep_dims` is ``False`` ,
the shape of output is :math:`(x_0, x_3, ..., x_R)`.
Raises:
TypeError: If `x` is not a Tensor.
TypeError: If `axis` is not one of the following: int, tuple or list.
TypeError: If `keep_dims` is not a bool.
ValueError: If `axis` is out of range.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
Examples:
>>> import mindspore
>>> import numpy as np
>>> from mindspore import Tensor, ops
>>> x = Tensor(np.random.randn(3, 4, 5, 6).astype(np.float32))
>>> output = ops.mean(x, 1, keep_dims=True)
>>> result = output.shape
>>> print(result)
(3, 1, 5, 6)
>>> # case 1: Reduces a dimension by averaging all elements in the dimension.
>>> x = Tensor(np.array([[[2, 2, 2, 2, 2, 2], [2, 2, 2, 2, 2, 2], [2, 2, 2, 2, 2, 2]],
... [[4, 4, 4, 4, 4, 4], [5, 5, 5, 5, 5, 5], [6, 6, 6, 6, 6, 6]],
... [[6, 6, 6, 6, 6, 6], [8, 8, 8, 8, 8, 8], [10, 10, 10, 10, 10, 10]]]),
... mindspore.float32)
>>> output = ops.mean(x)
>>> print(output)
5.0
>>> print(output.shape)
()
>>> # case 2: Reduces a dimension along the axis 0
>>> output = ops.mean(x, 0, True)
>>> print(output)
[[[4. 4. 4. 4. 4. 4.]
[5. 5. 5. 5. 5. 5.]
[6. 6. 6. 6. 6. 6.]]]
>>> # case 3: Reduces a dimension along the axis 1
>>> output = ops.mean(x, 1, True)
>>> print(output)
[[[2. 2. 2. 2. 2. 2.]]
[[5. 5. 5. 5. 5. 5.]]
[[8. 8. 8. 8. 8. 8.]]]
>>> # case 4: Reduces a dimension along the axis 2
>>> output = ops.mean(x, 2, True)
>>> print(output)
[[[ 2.]
[ 2.]
[ 2.]]
[[ 4.]
[ 5.]
[ 6.]]
[[ 6.]
[ 8.]
[10.]]]
"""
if axis is None:
axis = ()
return _get_cache_prim(P.ReduceMean)(keep_dims)(x, axis)
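
Note that `axis=None` is normalized to the empty tuple, which `ReduceMean` treats as "reduce over all axes"; a short sketch of the assumed equivalence (float32 input of ones, so the result is deterministic):

>>> import numpy as np
>>> from mindspore import Tensor, ops
>>> x = Tensor(np.ones((2, 3)).astype(np.float32))
>>> print(ops.mean(x))        # axis=None is converted to () internally
1.0
>>> print(ops.mean(x, ()))    # passing the empty tuple explicitly is assumed equivalent
1.0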
def mean_ext(input, axis=None, keep_dims=False, dtype=None):
r"""
Reduces all dimensions of a tensor by averaging all elements, by default.
It can also reduce a dimension of `input` along the specified `axis`. `keep_dims`
determines whether the dimensions of the output and input are the same.
Note:
The `axis` with tensor type is only used for compatibility with older versions and is not recommended.
Args:
input (Tensor[Number]): The input tensor. The dtype of the tensor to be reduced is number.
:math:`(N, *)` where :math:`*` means any number of additional dimensions.
axis (Union[int, tuple(int), list(int), Tensor]): The dimensions to reduce. Default: ``None`` ,
reduce all dimensions. Only constant value is allowed. Assume the rank of `input` is r,
and the value range is [-r,r).
keep_dims (bool): If ``True`` , keep these reduced dimensions and the length is 1.
If ``False`` , don't keep these dimensions. Default: ``False`` .
dtype (:class:`mindspore.dtype`): The desired data type of returned Tensor. Default: ``None`` .
Returns:
Tensor, has the same data type as input tensor.
- If `axis` is ``None`` , and `keep_dims` is ``False`` ,
the output is a 0-D tensor representing the mean of all elements in the input tensor.
- If `axis` is int, set as 1, and `keep_dims` is ``False`` ,
the shape of output is :math:`(x_0, x_2, ..., x_R)`.
- If `axis` is tuple(int), set as (1, 2), and `keep_dims` is ``False`` ,
the shape of output is :math:`(x_0, x_3, ..., x_R)`.
- If `axis` is 1-D Tensor, set as [1, 2], and `keep_dims` is ``False`` ,
the shape of output is :math:`(x_0, x_3, ..., x_R)`.
Raises:
TypeError: If `input` is not a Tensor.
TypeError: If `axis` is not one of the following: int, tuple, list or Tensor.
TypeError: If `keep_dims` is not a bool.
ValueError: If `axis` is out of range.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
Examples:
>>> import mindspore
>>> import numpy as np
>>> from mindspore import Tensor, ops
>>> x = Tensor(np.random.randn(3, 4, 5, 6).astype(np.float32))
>>> output = ops.mean(x, 1, keep_dims=True)
>>> result = output.shape
>>> print(result)
(3, 1, 5, 6)
>>> # case 1: Reduces a dimension by averaging all elements in the dimension.
>>> x = Tensor(np.array([[[2, 2, 2, 2, 2, 2], [2, 2, 2, 2, 2, 2], [2, 2, 2, 2, 2, 2]],
... [[4, 4, 4, 4, 4, 4], [5, 5, 5, 5, 5, 5], [6, 6, 6, 6, 6, 6]],
... [[6, 6, 6, 6, 6, 6], [8, 8, 8, 8, 8, 8], [10, 10, 10, 10, 10, 10]]]),
... mindspore.float32)
>>> output = ops.mean(x)
>>> print(output)
5.0
>>> print(output.shape)
()
>>> # case 2: Reduces a dimension along the axis 0
>>> output = ops.mean(x, 0, True)
>>> print(output)
[[[4. 4. 4. 4. 4. 4.]
[5. 5. 5. 5. 5. 5.]
[6. 6. 6. 6. 6. 6.]]]
>>> # case 3: Reduces a dimension along the axis 1
>>> output = ops.mean(x, 1, True)
>>> print(output)
[[[2. 2. 2. 2. 2. 2.]]
[[5. 5. 5. 5. 5. 5.]]
[[8. 8. 8. 8. 8. 8.]]]
>>> # case 4: Reduces a dimension along the axis 2
>>> output = ops.mean(x, 2, True)
>>> print(output)
[[[ 2.]
[ 2.]
[ 2.]]
[[ 4.]
[ 5.]
[ 6.]]
[[ 6.]
[ 8.]
[10.]]]
"""
return mean_ext_op(input, axis, keep_dims, dtype)
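
The doctest above does not exercise the extra `dtype` keyword that distinguishes `mean_ext` from the plain `mean`; a hedged sketch of that path (assuming `mean_ext` is reachable as `ops.mean_ext` once it is added to `__all__` below):

>>> import mindspore
>>> import numpy as np
>>> from mindspore import Tensor, ops
>>> x = Tensor(np.ones((2, 3)).astype(np.float32))
>>> output = ops.mean_ext(x, axis=1, keep_dims=False, dtype=mindspore.float32)
>>> print(output)
[1. 1.]
>>> print(output.shape)
(2,)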
def prod(input, axis=None, keep_dims=False, dtype=None):
r"""
Reduces a dimension of a tensor by multiplying all elements in the dimension, by default. And also can
@@ -11598,6 +11767,7 @@ __all__ = [
'amin',
'amax',
'mean',
'mean_ext',
'prod',
'all',
'any',

View File

@@ -20,7 +20,8 @@ import mindspore.common.dtype as mstype
from mindspore import nn
from mindspore import ops
from mindspore import Tensor
from mindspore.ops.function import mean, prod
from mindspore.ops.function import prod
from mindspore.ops.function import mean_ext as mean
from mindspore.ops.composite import GradOperation
from tests.st.utils import test_utils
from tests.st.ops.dynamic_shape.test_op_utils import TEST_OP