!48206 fix_ops_any_all_master

Merge pull request !48206 from yide12/tensor_isneginf_master
i-robot 2023-01-31 11:21:28 +00:00 committed by Gitee
commit 5acace7f6a
8 changed files with 173 additions and 20 deletions


@@ -297,8 +297,10 @@ Reduction Functions
:nosignatures:
:template: classtemplate.rst
mindspore.ops.all
mindspore.ops.amax
mindspore.ops.amin
mindspore.ops.any
mindspore.ops.argmax
mindspore.ops.argmin
mindspore.ops.cummax


@@ -0,0 +1,23 @@
mindspore.ops.all
=================
.. py:function:: mindspore.ops.all(x, axis=(), keep_dims=False)
By default, reduces a dimension of `x` by the "logical AND" of all elements in that dimension. It can also reduce a dimension of `x` along the axis. Whether the output and the input have the same dimensions is controlled by `keep_dims`.
Parameters:
- **x** (Tensor[bool]) - The input Tensor, with a dtype of bool. The shape is :math:`(N, *)`, where :math:`*` means any number of additional dimensions. The rank should be less than 8.
- **axis** (Union[int, tuple(int), list(int)], optional) - The dimensions to reduce. Only constant values are allowed. Supposing the rank of `x` is r, the valid range is [-r, r). Default: (), reduce all dimensions.
- **keep_dims** (bool, optional) - If True, keep the reduced dimensions with length 1; otherwise, remove the dimensions. Default: False.
Returns:
Tensor, with a dtype of bool.
- If `axis` is () and `keep_dims` is False, the output is a 0-D Tensor representing the "logical AND" of all elements in the input Tensor.
- If `axis` is an int, set as 2, and `keep_dims` is False, the shape of the output is :math:`(x_1, x_3, ..., x_R)`.
- If `axis` is a tuple(int) or list(int), set as (2, 3), and `keep_dims` is False, the shape of the output Tensor is :math:`(x_1, x_4, ..., x_R)`.
Raises:
- **TypeError** - If `keep_dims` is not a bool.
- **TypeError** - If `x` is not a Tensor.
- **TypeError** - If `axis` is not one of the following: int, tuple or list.
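Not part of the commit itself, but a minimal usage sketch of the semantics described above (expected values are noted in the comments):

import mindspore as ms
from mindspore import Tensor, ops

x = Tensor([[True, False], [True, True]], ms.bool_)

# Logical AND over every element -> 0-D Tensor with value False
print(ops.all(x))

# Logical AND along axis 1, keeping the reduced dimension with length 1
# -> shape (2, 1), values [[False], [True]]
print(ops.all(x, axis=1, keep_dims=True))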


@@ -0,0 +1,23 @@
mindspore.ops.any
=================
.. py:function:: mindspore.ops.any(x, axis=(), keep_dims=False)
By default, reduces a dimension of `x` by the "logical OR" of all elements in that dimension. It can also reduce a dimension of `x` along the axis. Whether the output and the input have the same dimensions is controlled by `keep_dims`.
Parameters:
- **x** (Tensor[bool]) - The input Tensor, with a dtype of bool. The shape is :math:`(N, *)`, where :math:`*` means any number of additional dimensions. The rank should be less than 8.
- **axis** (Union[int, tuple(int), list(int)], optional) - The dimensions to reduce. Only constant values are allowed. Supposing the rank of `x` is r, the valid range is [-r, r). Default: (), reduce all dimensions.
- **keep_dims** (bool, optional) - If True, keep the reduced dimensions with length 1; otherwise, remove the dimensions. Default: False.
Returns:
Tensor, with a dtype of bool.
- If `axis` is () and `keep_dims` is False, the output is a 0-D Tensor representing the "logical OR" of all elements in the input Tensor.
- If `axis` is an int, set as 2, and `keep_dims` is False, the shape of the output is :math:`(x_1, x_3, ..., x_R)`.
- If `axis` is a tuple(int) or list(int), set as (2, 3), and `keep_dims` is False, the shape of the output Tensor is :math:`(x_1, x_4, ..., x_R)`.
Raises:
- **TypeError** - If `keep_dims` is not a bool.
- **TypeError** - If `x` is not a Tensor.
- **TypeError** - If `axis` is not one of the following: int, tuple or list.
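As with `all` above, a minimal usage sketch (not part of the commit; expected values are noted in the comments):

import mindspore as ms
from mindspore import Tensor, ops

x = Tensor([[True, False], [False, False]], ms.bool_)

# Logical OR over every element -> 0-D Tensor with value True
print(ops.any(x))

# Logical OR along axis 1, keeping the reduced dimension with length 1
# -> shape (2, 1), values [[True], [False]]
print(ops.any(x, axis=1, keep_dims=True))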


@@ -297,8 +297,10 @@ Reduction Functions
:nosignatures:
:template: classtemplate.rst
mindspore.ops.all
mindspore.ops.amax
mindspore.ops.amin
mindspore.ops.any
mindspore.ops.argmax
mindspore.ops.argmin
mindspore.ops.cummax


@@ -7674,8 +7674,7 @@ def _check_matmul_shapes(shape1, shape2, prim_name=None):
r_shape2 = shape2[:-2]
max_len = max(len(r_shape1), len(r_shape2))
for i in range(max_len):
items = [it[i - max_len + len(it)] if i - max_len +
len(it) >= 0 else 1 for it in (r_shape1, r_shape2)]
items = [it[i - max_len + len(it)] if i - max_len + len(it) >= 0 else 1 for it in (r_shape1, r_shape2)]
max_size = max(items)
shape_out.append(max_size)
return tuple(shape_out)
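The only change in this hunk is joining the wrapped list comprehension onto a single line. For readers skimming the diff, the same batch-dimension broadcasting rule is shown below as a self-contained sketch (the function name is illustrative; the shape-compatibility checks the library performs elsewhere are omitted):

def broadcast_batch_shape(r_shape1, r_shape2):
    # Right-align the two batch shapes; a missing dimension counts as 1,
    # and each output dimension is the larger of the two aligned sizes.
    max_len = max(len(r_shape1), len(r_shape2))
    shape_out = []
    for i in range(max_len):
        items = [it[i - max_len + len(it)] if i - max_len + len(it) >= 0 else 1
                 for it in (r_shape1, r_shape2)]
        shape_out.append(max(items))
    return tuple(shape_out)

assert broadcast_batch_shape((2, 1, 4), (3, 1)) == (2, 3, 4)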
@@ -8470,23 +8469,24 @@ def kron(x, y):
def all(x, axis=(), keep_dims=False):
r"""
Reduces a dimension of a tensor by the "logicalAND" of all elements in the dimension, by default. And also can
Reduces a dimension of `x` by the "logicalAND" of all elements in the dimension, by default. And also can
reduce a dimension of `x` along the axis. Determine whether the dimensions of the output and input are the same by
controlling `keep_dims`.
Args:
x (Tensor[bool]): The input tensor. The dtype of the tensor to be reduced is bool.
x (Tensor[bool]): The input Tensor. The dtype of the Tensor is bool.
:math:`(N,*)` where :math:`*` means, any number of additional dimensions, its rank should be less than 8.
axis (Union[int, tuple(int), list(int)]): The dimensions to reduce. Default: (), reduce all dimensions.
Only constant value is allowed. Must be in the range [-rank(x), rank(x)).
keep_dims (bool): If true, keep these reduced dimensions and the length is 1.
If false, don't keep these dimensions. Default : False.
axis (Union[int, tuple(int), list(int)], optional): The dimensions to reduce.
Only constant value is allowed. Supposed the rank of `x` is r,
axis must be in the range [-rank(x), rank(x)). Default: (), all dimensions are reduced.
keep_dims (bool, optional): If true, keep these reduced dimensions and the length is 1.
If false, don't keep these dimensions. Default : False.
Returns:
Tensor, the dtype is bool.
- If axis is (), and keep_dims is False,
the output is a 0-D tensor representing the "logical and" of all elements in the input tensor.
the output is a 0-D Tensor representing the "logical and" of all elements in the input Tensor.
- If axis is int, set as 2, and keep_dims is False,
the shape of output is :math:`(x_1, x_3, ..., x_R)`.
- If axis is tuple(int), set as (2, 3), and keep_dims is False,
@@ -8523,23 +8523,24 @@ def all(x, axis=(), keep_dims=False):
def any(x, axis=(), keep_dims=False):
r"""
Reduces a dimension of a tensor by the "logical OR" of all elements in the dimension, by default. And also can
Reduces a dimension of `x` by the "logical OR" of all elements in the dimension, by default. And also can
reduce a dimension of `x` along the axis. Determine whether the dimensions of the output and input are the same by
controlling `keep_dims`.
Args:
x (Tensor[bool]): The input tensor. The dtype of the tensor to be reduced is bool.
x (Tensor[bool]): The input Tensor. The dtype of the Tensor is bool.
:math:`(N,*)` where :math:`*` means, any number of additional dimensions, its rank should be less than 8.
axis (Union[int, tuple(int), list(int)]): The dimensions to reduce. Default: (), reduce all dimensions.
Only constant value is allowed. Must be in the range [-rank(x), rank(x)).
keep_dims (bool): If true, keep these reduced dimensions and the length is 1.
If false, don't keep these dimensions. Default : False.
axis (Union[int, tuple(int), list(int)], optional): The dimensions to reduce.
Only constant value is allowed. Supposed the rank of `x` is r,
axis must be in the range [-rank(x), rank(x)). Default: (), all dimensions are reduced.
keep_dims (bool, optional): If true, keep these reduced dimensions and the length is 1.
If false, don't keep these dimensions. Default : False.
Returns:
Tensor, the dtype is bool.
- If axis is (), and keep_dims is False,
the output is a 0-D tensor representing the "logical or" of all elements in the input tensor.
the output is a 0-D Tensor representing the "logical or" of all elements in the input Tensor.
- If axis is int, set as 2, and keep_dims is False,
the shape of output is :math:`(x_1, x_3, ..., x_R)`.
- If axis is tuple(int), set as (2, 3), and keep_dims is False,


@@ -4897,12 +4897,12 @@ def pixel_shuffle(x, upscale_factor):
"by `upscale_factor` squared.")
c = c // upscale_factor ** 2
input_perm = (pre + (c, upscale_factor, upscale_factor, h, w))
reshape = ops.Reshape()
reshape = _get_cache_prim(P.Reshape)()
transpose = _get_cache_prim(P.Transpose)()
x = reshape(x, input_perm)
input_perm = [i for i in range(length - 2)]
input_perm = input_perm + [length, length - 2, length + 1, length - 1]
input_perm = tuple(input_perm)
transpose = ops.Transpose()
x = transpose(x, input_perm)
x = reshape(x, (pre + (c, upscale_factor * h, upscale_factor * w)))
return x
@@ -4953,12 +4953,12 @@ def pixel_unshuffle(x, downscale_factor):
h = h // downscale_factor
w = w // downscale_factor
input_perm = (pre + (c, h, downscale_factor, w, downscale_factor))
reshape = ops.Reshape()
reshape = _get_cache_prim(P.Reshape)()
transpose = _get_cache_prim(P.Transpose)()
x = reshape(x, input_perm)
input_perm = [i for i in range(length - 2)]
input_perm = input_perm + [length - 1, length + 1, length - 2, length]
input_perm = tuple(input_perm)
transpose = ops.Transpose()
x = transpose(x, input_perm)
x = reshape(x, (pre + (c * downscale_factor * downscale_factor, h, w)))
return x
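These two hunks only swap the per-call `ops.Reshape()` / `ops.Transpose()` construction for cached primitives via `_get_cache_prim`, so the functional behavior is unchanged. A quick round-trip sketch of the two functions being touched (assuming both are exposed under `mindspore.ops` as in this file and that pixel_unshuffle inverts pixel_shuffle):

import numpy as np
import mindspore as ms
from mindspore import Tensor, ops

x = Tensor(np.arange(9 * 4 * 4, dtype=np.float32).reshape(1, 9, 4, 4))

y = ops.pixel_shuffle(x, 3)    # (1, 9, 4, 4) -> (1, 1, 12, 12): C / 3**2, H * 3, W * 3
z = ops.pixel_unshuffle(y, 3)  # (1, 1, 12, 12) -> (1, 9, 4, 4)

print(y.shape, z.shape)
assert (z.asnumpy() == x.asnumpy()).all()  # the round trip should recover the original values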


@@ -0,0 +1,51 @@
# Copyright 2023 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import pytest
import mindspore as ms
import mindspore.nn as nn
from mindspore import Tensor, ops
class Net(nn.Cell):
def construct(self, x):
return ops.all(x, axis=1, keep_dims=True)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_arm_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('mode', [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
def test_ops_all(mode):
"""
Feature: ops.all
Description: Verify the result of all
Expectation: success
"""
ms.set_context(mode=mode)
x = Tensor([[True, True],
[False, True],
[True, False],
[False, False]], ms.bool_)
net = Net()
output = net(x)
expect_output = Tensor([[True],
[False],
[False],
[False]], ms.bool_)
assert all(output == expect_output)


@@ -0,0 +1,51 @@
# Copyright 2023 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import pytest
import mindspore as ms
import mindspore.nn as nn
from mindspore import Tensor, ops
class Net(nn.Cell):
def construct(self, x):
return ops.any(x, axis=1, keep_dims=True)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_arm_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('mode', [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
def test_ops_any(mode):
"""
Feature: ops.any
Description: Verify the result of any
Expectation: success
"""
ms.set_context(mode=mode)
x = Tensor([[True, True],
[False, True],
[True, False],
[False, False]], ms.bool_)
net = Net()
output = net(x)
expect_output = Tensor([[True],
[True],
[True],
[False]], ms.bool_)
assert all(output == expect_output)