add squeeze functional API

zhujingxuan 2022-07-15 16:07:08 +08:00
parent 4bb47f2166
commit b189d2862f
8 changed files with 192 additions and 31 deletions

View File

@@ -55,6 +55,7 @@ from .array_func import (
scalar_to_tensor,
tuple_to_array,
expand_dims,
squeeze,
transpose,
scatter_nd,
scatter_nd_add,

View File

@@ -1209,6 +1209,52 @@ def expand_dims(input_x, axis):
return expand_dims_(input_x, axis)
def squeeze(input_x, axis=()):
"""
Return the Tensor after deleting the dimension of size 1 in the specified `axis`.
If :math:`axis=()`, it will remove all the dimensions of size 1.
If `axis` is specified, it will remove the dimensions of size 1 in the given `axis`.
For example, if the dimension is not specified :math:`axis=()`, input shape is (A, 1, B, C, 1, D),
then the shape of the output Tensor is (A, B, C, D). If the dimension is specified, the squeeze operation
is only performed in the specified dimension. If input shape is (A, 1, B), input Tensor will not be
changed when :math:`axis=0` , but when :math:`axis=1` , the shape of the input Tensor will be changed to (A, B).
Note:
- Please note that in dynamic graph mode, the output Tensor will share data with the input Tensor,
and there is no Tensor data copy process.
- The dimension index starts at 0 and must be in the range `[-input.ndim, input.ndim]`.
Args:
input_x (Tensor): The shape of tensor is :math:`(x_1, x_2, ..., x_R)`.
axis (Union[int, tuple(int)]): Specifies the dimension indexes of shape to be removed, which will remove
all the dimensions of size 1 in the given axis parameter. If specified, it must be int32 or int64.
Default: (), an empty tuple.
Returns:
Tensor, the shape of tensor is :math:`(x_1, x_2, ..., x_S)`.
Raises:
TypeError: If `axis` is neither an int nor a tuple.
TypeError: If `axis` is a tuple whose elements are not all int.
ValueError: If the corresponding dimension of the specified axis isn't equal to 1.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
Examples:
>>> input_x = Tensor(np.ones(shape=[3, 2, 1]), mindspore.float32)
>>> output = ops.squeeze(input_x, 2)
>>> print(output)
[[1. 1.]
[1. 1.]
[1. 1.]]
"""
squeeze_ = _get_cache_prim(P.Squeeze)(axis)
return squeeze_(input_x)
def transpose(input_x, input_perm):
"""
Permutes the dimensions of the input tensor according to input permutation.
@@ -3985,6 +4031,7 @@ __all__ = [
'batch_to_space_nd',
'tuple_to_array',
'expand_dims',
'squeeze',
'transpose',
'scatter_nd',
'scatter_nd_add',
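With `squeeze` now exported through `__all__`, it can be called straight from the `mindspore.ops` namespace. A minimal usage sketch of the documented behavior, using illustrative shapes that are not taken from this diff (default `axis=()` drops every size-1 dimension; an int or tuple `axis` drops only the named ones):

import numpy as np
import mindspore
from mindspore import Tensor, ops

x = Tensor(np.ones((1, 3, 1, 2)), mindspore.float32)
# Default axis=() removes every dimension of size 1: (1, 3, 1, 2) -> (3, 2).
print(ops.squeeze(x).shape)
# An int axis removes only that dimension: (1, 3, 1, 2) -> (3, 1, 2).
print(ops.squeeze(x, 0).shape)
# A tuple axis removes each listed size-1 dimension: (1, 3, 1, 2) -> (3, 2).
print(ops.squeeze(x, (0, 2)).shape)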

View File

@@ -63,7 +63,6 @@ sort = P.Sort()
tensor_range = P.Range()
if not security.enable_security():
print_ = P.Print()
squeeze = P.Squeeze()
tensor_scatter_update = P.TensorScatterUpdate()
scatter_nd_update = P.ScatterNdUpdate()
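The module-level `squeeze = P.Squeeze()` alias is dropped because the new functional wrapper must accept an arbitrary `axis` per call, which a single pre-built primitive instance cannot. The `_get_cache_prim(P.Squeeze)(axis)` call in `array_func.py` instead reuses one primitive instance per distinct `axis` rather than constructing a new one on every invocation. As a rough illustration only (not MindSpore's actual implementation), a cache of that shape can be sketched as:

# Hypothetical sketch: cache primitive instances keyed by class and constructor args.
_PRIM_CACHE = {}

def get_cache_prim_sketch(prim_class):
    def build(*args):
        key = (prim_class, args)
        if key not in _PRIM_CACHE:
            _PRIM_CACHE[key] = prim_class(*args)
        return _PRIM_CACHE[key]
    return build

# Mirrors the pattern in the diff: get_cache_prim_sketch(P.Squeeze)(axis)(input_x)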

View File

@@ -849,33 +849,7 @@ class Squeeze(Primitive):
"""
Return the Tensor after deleting the dimension of size 1 in the specified `axis`.
If :math:`axis=()`, it will remove all the dimensions of size 1.
If `axis` is specified, it will remove the dimensions of size 1 in the given `axis`.
For example, if the dimension is not specified :math:`axis=()`, input shape is (A, 1, B, C, 1, D),
then the shape of the output Tensor is (A, B, C, D). If the dimension is specified, the squeeze operation
is only performed in the specified dimension. If input shape is (A, 1, B), input Tensor will not be
changed when :math:`axis=0` , but when :math:`axis=1` , the shape of the input Tensor will be changed to (A, B).
Note:
- Please note that in dynamic graph mode, the output Tensor will share data with the input Tensor,
and there is no Tensor data copy process.
- The dimension index starts at 0 and must be in the range `[-input.ndim, input.ndim]`.
Args:
axis (Union[int, tuple(int)]): Specifies the dimension indexes of shape to be removed, which will remove
all the dimensions of size 1 in the given axis parameter. If specified, it must be int32 or int64.
Default: (), an empty tuple.
Inputs:
- **input_x** (Tensor) - The shape of tensor is :math:`(x_1, x_2, ..., x_R)`.
Outputs:
Tensor, the shape of tensor is :math:`(x_1, x_2, ..., x_S)`.
Raises:
TypeError: If `axis` is neither an int nor a tuple.
TypeError: If `axis` is a tuple whose elements are not all int.
ValueError: If the corresponding dimension of the specified axis isn't equal to 1.
Refer to :func:`mindspore.ops.squeeze` for more detail.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
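With the class docstring now deferring to :func:`mindspore.ops.squeeze`, the primitive and functional entry points are meant to be interchangeable. A quick equivalence check, with an assumed input shape:

import numpy as np
import mindspore
from mindspore import Tensor, ops
from mindspore.ops import operations as P

x = Tensor(np.ones((3, 2, 1)), mindspore.float32)
a = P.Squeeze(2)(x)      # primitive form: axis fixed at construction
b = ops.squeeze(x, 2)    # functional form: axis passed per call
assert np.array_equal(a.asnumpy(), b.asnumpy())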

View File

@@ -17,7 +17,7 @@ import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore import Tensor, ops
from mindspore.ops import operations as P
@@ -73,3 +73,33 @@ def test_net_constant():
net = NetConstant(x)
output = net()
assert np.all(output.asnumpy() == np.expand_dims(x, -1))
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_func():
"""
Feature: Test ExpandDims CPU.
Description: Test functional api.
Expectation: match to np benchmark.
"""
context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
output = ops.expand_dims(Tensor(x), -1)
assert np.all(output.asnumpy() == np.expand_dims(x, -1))
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_tensor():
"""
Feature: Test ExpandDims CPU.
Description: Test Tensor api.
Expectation: match to np benchmark.
"""
context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
output = Tensor(x).expand_dims(-1)
assert np.all(output.asnumpy() == np.expand_dims(x, -1))

View File

@@ -49,3 +49,43 @@ def test_squeeze(data_type):
net = SqueezeNet()
output = net(Tensor(x))
assert np.all(output.asnumpy() == x.squeeze())
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_func():
"""
Feature: Test Squeeze CPU.
Description: Test functional api.
Expectation: match to np benchmark.
"""
context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
np.random.seed(0)
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
output = ops.squeeze(Tensor(x))
assert np.all(output.asnumpy() == x.squeeze())
output = ops.squeeze(Tensor(x), 0)
assert np.all(output.asnumpy() == x.squeeze(0))
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_tensor():
"""
Feature: Test Squeeze CPU.
Description: Test Tensor api.
Expectation: match to np benchmark.
"""
context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
np.random.seed(0)
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
output = Tensor(x).squeeze()
assert np.all(output.asnumpy() == x.squeeze())
output = Tensor(x).squeeze(0)
assert np.all(output.asnumpy() == x.squeeze(0))

View File

@@ -17,7 +17,7 @@ import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore import Tensor, ops
from mindspore.ops import operations as P
@@ -73,3 +73,33 @@ def test_net_constant():
net = NetConstant(x)
output = net()
assert np.all(output.asnumpy() == np.expand_dims(x, -1))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_func():
"""
Feature: Test ExpandDims GPU.
Description: Test functional api.
Expectation: match to np benchmark.
"""
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
output = ops.expand_dims(Tensor(x), -1)
assert np.all(output.asnumpy() == np.expand_dims(x, -1))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_tensor():
"""
Feature: Test ExpandDims GPU.
Description: Test Tensor api.
Expectation: match to np benchmark.
"""
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
output = Tensor(x).expand_dims(-1)
assert np.all(output.asnumpy() == np.expand_dims(x, -1))

View File

@@ -17,7 +17,7 @@ import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore import Tensor, ops
from mindspore.ops import operations as P
@@ -49,3 +49,43 @@ def test_squeeze(data_type):
net = SqueezeNet()
output = net(Tensor(x))
assert np.all(output.asnumpy() == x.squeeze())
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_func():
"""
Feature: Test Squeeze GPU.
Description: Test functional api.
Expectation: match to np benchmark.
"""
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(0)
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
output = ops.squeeze(Tensor(x))
assert np.all(output.asnumpy() == x.squeeze())
output = ops.squeeze(Tensor(x), 0)
assert np.all(output.asnumpy() == x.squeeze(0))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_tensor():
"""
Feature: Test Squeeze GPU.
Description: Test Tensor api.
Expectation: match to np benchmark.
"""
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(0)
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
output = Tensor(x).squeeze()
assert np.all(output.asnumpy() == x.squeeze())
output = Tensor(x).squeeze(0)
assert np.all(output.asnumpy() == x.squeeze(0))