!35916 [OP] delete space_to_batch_nd tensor api

Merge pull request !35916 from zichun_ye/del_space_to_batch_nd_tensor
This commit is contained in:
i-robot 2022-06-15 02:04:23 +00:00 committed by Gitee
commit c5563116d0
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
6 changed files with 0 additions and 117 deletions
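The deleted Tensor method only dispatched to the `SpaceToBatchND` primitive, and its docstring already pointed users at :func:`mindspore.ops.space_to_batch_nd`, so call sites can switch to the functional interface. A minimal migration sketch, reusing the values from the deleted docstring example (the functional call itself is not part of this diff and its positional argument order is an assumption):

>>> import numpy as np
>>> import mindspore
>>> from mindspore import Tensor, ops
>>> block_shape = [2, 2]
>>> paddings = [[0, 0], [0, 0]]
>>> input_x = Tensor(np.array([[[[1, 2], [3, 4]]]]), mindspore.float32)
>>> # before this commit: output = input_x.space_to_batch_nd(block_shape, paddings)
>>> output = ops.space_to_batch_nd(input_x, block_shape, paddings)
>>> print(output)
[[[[1.]]]
 [[[2.]]]
 [[[3.]]]
 [[[4.]]]]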

View File

@@ -1195,31 +1195,6 @@ mindspore.Tensor
- **TypeError** - `y` is not a Tensor, int or float.
- **ValueError** - The shapes of the inputs are not the same.
.. py:method:: space_to_batch_nd(block_shape, paddings)
Divides the spatial dimensions into blocks of the given size, then rearranges the tensor along the batch dimension.

For more details, see :func:`mindspore.ops.space_to_batch_nd`.

**Parameters:**

- **block_shape** (list[int], tuple[int], int) - The block shape, describing how many pieces each spatial dimension is divided into.
- **paddings** (tuple, list) - The padding sizes for the spatial dimensions.

**Returns:**

Tensor, the result of the input after division and rearrangement.

**Raises:**

- **TypeError** - If `block_shape` is not a list, tuple or int.
- **TypeError** - If `paddings` is neither a list nor a tuple.
- **ValueError** - If `block_shape` is not one-dimensional when `block_shape` is a list or tuple.
- **ValueError** - If the length of `block_shape` is not 2 on the Ascend platform.
- **ValueError** - If the shape of `paddings` is not (2, M), where M is the length of `block_shape`.
- **ValueError** - If an element of `block_shape` is not an integer greater than 1.
- **ValueError** - If an element of `paddings` is not a non-negative integer.
.. py:method:: shape
:property:

View File

@@ -221,7 +221,6 @@ BuiltInTypeMap &GetMethodMap() {
{"argmin", std::string("argmin")}, // P.Argmax()
{"resize", std::string("resize")}, // P.Reshape()
{"select", std::string("select")}, // P.Select()
{"space_to_batch_nd", std::string("space_to_batch_nd")}, // P.SpaceToBatchND
{"batch_to_space_nd", std::string("batch_to_space_nd")}, // P.BatchToSpaceND
{"choose", std::string("choose")}, // P.Select()
{"diagonal", std::string("diagonal")}, // P.Eye()

View File

@@ -2191,15 +2191,6 @@ def filter_(fun, iter_):
    return result


def space_to_batch_nd(x, block_shape, paddings):
    """
    Divides spatial dimensions into blocks and combines the block size with the original batch.

    Refer to :func:`mindspore.ops.space_to_batch_nd` for more detail.
    """
    return P.SpaceToBatchND(block_shape, paddings)(x)


def batch_to_space_nd(x, block_shape, crops):
    r"""
    Divides batch dimension with blocks and interleaves these blocks back into spatial dimensions.
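Both shims share the same two-step pattern: construct the primitive with its attributes, then call the resulting op on the data. A small illustrative sketch of that pattern (not part of this diff; the input values are placeholders):

>>> import numpy as np
>>> import mindspore
>>> from mindspore import Tensor
>>> from mindspore.ops import operations as P
>>> # build the primitive with its attributes, as the shim above does
>>> op = P.SpaceToBatchND([2, 2], [[0, 0], [0, 0]])
>>> x = Tensor(np.ones((1, 1, 2, 2)), mindspore.float32)
>>> # apply the op instance to the data
>>> print(op(x).shape)
(4, 1, 1, 1)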

View File

@@ -1566,8 +1566,6 @@ class Tensor(Tensor_):
        self._init_check()
        return tensor_operator_registry.get('col2im')(self, output_size, kernel_size, dilation, padding_value, stride)

    def reshape(self, *shape):
        """
        Give a new shape to a tensor without changing its data.
@@ -3141,48 +3139,6 @@ class Tensor(Tensor_):
            j = tensor_operator_registry.get('select')(mask, mid, j)
        return j
    def space_to_batch_nd(self, block_shape, paddings):
        r"""
        Divides spatial dimensions into blocks and combines the block size with the original batch.

        Refer to :func:`mindspore.ops.space_to_batch_nd` for more detail.

        Args:
            block_shape (Union[list(int), tuple(int), int]): The block shape for dividing the spatial
                dimensions, with every value greater than 1.
            paddings (Union[tuple, list]): The padding values for the spatial dimensions, containing
                M sublists. Each sublist contains 2 integer values.

        Returns:
            Tensor, the output tensor with the same data type as the input tensor.

        Raises:
            ValueError: If `block_shape` is not one dimensional when `block_shape` is a list or tuple.
            ValueError: If the length of `block_shape` is not 2 on Ascend.
            ValueError: If the element of `block_shape` is not an integer larger than 1.
            ValueError: If the shape of `paddings` is not (2, M), where M is the length of `block_shape`.
            ValueError: If the element of `paddings` is not a non-negative integer.
            TypeError: If `block_shape` is not one of list, tuple, int.
            TypeError: If `paddings` is neither list nor tuple.

        Supported Platforms:
            ``Ascend`` ``CPU``

        Examples:
            >>> import numpy as np
            >>> import mindspore
            >>> from mindspore import Tensor
            >>> block_shape = [2, 2]
            >>> paddings = [[0, 0], [0, 0]]
            >>> input_x = Tensor(np.array([[[[1, 2], [3, 4]]]]), mindspore.float32)
            >>> output = input_x.space_to_batch_nd(block_shape, paddings)
            >>> print(output)
            [[[[1.]]]
             [[[2.]]]
             [[[3.]]]
             [[[4.]]]]
        """
        return tensor_operator_registry.get('space_to_batch_nd')(block_shape, paddings)(self)
    def batch_to_space_nd(self, block_shape, crops):
        """
        Divides batch dimension with blocks and interleaves these blocks back into spatial dimensions.

View File

@@ -950,7 +950,6 @@ tensor_operator_registry.register('sum', P.ReduceSum)
tensor_operator_registry.register('split', P.Split)
tensor_operator_registry.register('select', P.Select)
tensor_operator_registry.register('zeros_like', P.ZerosLike)
tensor_operator_registry.register('space_to_batch_nd', P.SpaceToBatchND)
tensor_operator_registry.register('batch_to_space_nd', P.BatchToSpaceND)
tensor_operator_registry.register('one_hot', P.OneHot)
tensor_operator_registry.register('masked_fill', masked_fill)
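Removing this registration severs the Tensor-side dispatch: the deleted method in tensor.py looked the primitive class up here by name, instantiated it with the call arguments, and applied it to `self`. A hedged sketch of that dispatch chain, mirroring the `batch_to_space_nd` entry that stays registered (illustrative only, not code added by this commit; `block_shape`, `crops`, and `input_tensor` are placeholders):

>>> # op_cls is P.BatchToSpaceND, fetched from the registry by name
>>> op_cls = tensor_operator_registry.get('batch_to_space_nd')
>>> # instantiate the primitive with its attributes, then apply it to the tensor
>>> output = op_cls(block_shape, crops)(input_tensor)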

View File

@@ -117,43 +117,6 @@ def test_space_to_batch_nd_function():
    np.testing.assert_array_equal(output.asnumpy(), expect)


class SpaceToBatchNDTensorNet(nn.Cell):
    def __init__(self, block_size=2):
        super(SpaceToBatchNDTensorNet, self).__init__()
        self.block_size = block_size

    def construct(self, x):
        return x.space_to_batch_nd(self.block_size, [[0, 0], [0, 0]])


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_space_to_batch_nd_tensor():
    """
    Feature: test SpaceToBatchND tensor interface.
    Description: test tensor interface.
    Expectation: the result matches the numpy result.
    """
    net = SpaceToBatchNDTensorNet(2)
    input_x = Tensor(np.arange(16).reshape((1, 1, 4, 4)).astype(np.float32), mindspore.float32)
    expect = np.array([[[[0, 2],
                         [8, 10]]],
                       [[[1, 3],
                         [9, 11]]],
                       [[[4, 6],
                         [12, 14]]],
                       [[[5, 7],
                         [13, 15]]]]).astype(np.float32)
    context.set_context(mode=context.PYNATIVE_MODE, device_target="CPU")
    output = net(input_x)
    assert (output.asnumpy() == expect).all()
    context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
    output = net(input_x)
    assert (output.asnumpy() == expect).all()


class SpaceToBatchNDDynamicShapeNetMS(nn.Cell):
    def __init__(self, block_size, paddings, axis=0):
        super().__init__()