From 5a3b579a5b3791c6c825bed1a10abe2b62fdb5b6 Mon Sep 17 00:00:00 2001
From: Zichun Ye
Date: Tue, 14 Jun 2022 14:43:30 +0800
Subject: [PATCH] delete space_to_batch_nd tensor api

---
 .../api_python/mindspore/mindspore.Tensor.rst | 25 -----------
 mindspore/ccsrc/pipeline/jit/resource.cc      |  1 -
 .../_extends/parse/standard_method.py         |  9 ----
 mindspore/python/mindspore/common/tensor.py   | 44 -------------------
 mindspore/python/mindspore/ops/functional.py  |  1 -
 tests/st/ops/cpu/test_spacetobatchnd_op.py    | 37 ----------------
 6 files changed, 117 deletions(-)

diff --git a/docs/api/api_python/mindspore/mindspore.Tensor.rst b/docs/api/api_python/mindspore/mindspore.Tensor.rst
index fa0feed21d5..378bc3d2956 100644
--- a/docs/api/api_python/mindspore/mindspore.Tensor.rst
+++ b/docs/api/api_python/mindspore/mindspore.Tensor.rst
@@ -1290,31 +1290,6 @@ mindspore.Tensor
         - **TypeError** - `y` 不是Tensor、int或者float。
         - **ValueError** - 输入的shape不相同。
 
-    .. py:method:: space_to_batch_nd(block_shape, paddings)
-
-        将空间维度划分为对应大小的块，然后在批次维度重排张量。
-
-        更多参考详见 :func:`mindspore.ops.space_to_batch_nd`。
-
-        **参数：**
-
-        - **block_shape** (list[int], tuple[int], int) - 块形状描述空间维度为分割的个数。
-        - **paddings** (tuple, list) - 空间维度的填充大小。
-
-        **返回：**
-
-        Tensor，经过划分排列之后的结果。
-
-        **异常：**
-
-        - **TypeError** - 如果 `block_shape` 不是 list, tuple 或者 int。
-        - **TypeError** - 如果 `paddings` 不是 list 或者 tuple。
-        - **ValueError** - 如果当 `block_shape` 为 list 或 tuple, `block_shape` 不是一维。
-        - **ValueError** - 如果 Ascend 平台上 `block_shape` 长度不是2。
-        - **ValueError** - 如果 `paddings` 的形状不是 (2, M), 其中 M 为 `block_shape` 的长度。
-        - **ValueError** - 如果 `block_shape` 的元素不是大于一的整数。
-        - **ValueError** - 如果 `paddings` 的元素不是非负的整数。
-
     .. py:method:: shape
         :property:
 
diff --git a/mindspore/ccsrc/pipeline/jit/resource.cc b/mindspore/ccsrc/pipeline/jit/resource.cc
index c6129ec3c2d..c19afe44632 100644
--- a/mindspore/ccsrc/pipeline/jit/resource.cc
+++ b/mindspore/ccsrc/pipeline/jit/resource.cc
@@ -221,7 +221,6 @@ BuiltInTypeMap &GetMethodMap() {
                                          {"argmin", std::string("argmin")},                   // P.Argmax()
                                          {"resize", std::string("resize")},                   // P.Reshape()
                                          {"select", std::string("select")},                   // P.Select()
-                                         {"space_to_batch_nd", std::string("space_to_batch_nd")},  // P.SpaceToBatchND
                                          {"batch_to_space_nd", std::string("batch_to_space_nd")},  // P.BatchToSpaceND
                                          {"choose", std::string("choose")},                   // P.Select()
                                          {"diagonal", std::string("diagonal")},               // P.Eye()
diff --git a/mindspore/python/mindspore/_extends/parse/standard_method.py b/mindspore/python/mindspore/_extends/parse/standard_method.py
index 53bc5714925..5d3ee366223 100644
--- a/mindspore/python/mindspore/_extends/parse/standard_method.py
+++ b/mindspore/python/mindspore/_extends/parse/standard_method.py
@@ -2224,15 +2224,6 @@ def filter_(fun, iter_):
     return result
 
 
-def space_to_batch_nd(x, block_shape, paddings):
-    """
-    Divides spatial dimensions into blocks and combines the block size with the original batch.
-
-    Refer to :func:`mindspore.ops.space_to_batch_nd` for more detail.
-    """
-    return P.SpaceToBatchND(block_shape, paddings)(x)
-
-
 def batch_to_space_nd(x, block_shape, crops):
     r"""
     Divides batch dimension with blocks and interleaves these blocks back into spatial dimensions.
diff --git a/mindspore/python/mindspore/common/tensor.py b/mindspore/python/mindspore/common/tensor.py
index e6634127b62..139b40e0b13 100644
--- a/mindspore/python/mindspore/common/tensor.py
+++ b/mindspore/python/mindspore/common/tensor.py
@@ -1687,8 +1687,6 @@ class Tensor(Tensor_):
 
         self._init_check()
         return tensor_operator_registry.get('col2im')(self, output_size, kernel_size, dilation, padding_value, stride)
-
-
     def reshape(self, *shape):
         """
         Give a new shape to a tensor without changing its data.
@@ -3262,48 +3260,6 @@ class Tensor(Tensor_):
             j = tensor_operator_registry.get('select')(mask, mid, j)
         return j
 
-    def space_to_batch_nd(self, block_shape, paddings):
-        r"""
-        Divides spatial dimensions into blocks and combines the block size with the original batch.
-
-        Refer to :func:`mindspore.ops.space_to_batch_nd` for more detail.
-
-        Args:
-            block_shape (Union[list(int), tuple(int), int]): The block size of dividing block with all value greater
-                than 1.
-            paddings (Union[tuple, list]): The padding values for spatial dimensions, containing M subtraction list.
-                Each contains 2 integer values.
-
-        Returns:
-            Tensor, the output tensor with the same data type as the input tensor.
-
-        Raises:
-            ValueError: If `block_shape` is not one dimensional when `block_shape` is a list or tuple.
-            ValueError: If the length of `block_shape` is not 2 on Ascend.
-            ValueError: If the element of `block_shape` is not an integer larger than 1.
-            ValueError: If shape of `paddings` is not (2, M), where M is the length of `block_shape`.
-            ValueError: If the element of `paddings` is not an integer larger than 0.
-            TypeError: If `block_shape` is not one of list, tuple, int.
-            TypeError: If `paddings` is neither list nor tuple.
-
-        Supported Platforms:
-            ``Ascend`` ``CPU``
-
-        Examples:
-            >>> import numpy as np
-            >>> from mindspore import Tensor
-            >>> block_shape = [2, 2]
-            >>> paddings = [[0, 0], [0, 0]]
-            >>> input_x = Tensor(np.array([[[[1, 2], [3, 4]]]]), mindspore.float32)
-            >>> output = input_x.space_to_batch_nd(block_shape, paddings)
-            >>> print(output)
-            [[[[1.]]]
-             [[[2.]]]
-             [[[3.]]]
-             [[[4.]]]]
-        """
-        return tensor_operator_registry.get('space_to_batch_nd')(block_shape, paddings)(self)
-
     def batch_to_space_nd(self, block_shape, crops):
         """
         Divides batch dimension with blocks and interleaves these blocks back into spatial dimensions.
diff --git a/mindspore/python/mindspore/ops/functional.py b/mindspore/python/mindspore/ops/functional.py
index 5fbc5024ac1..39b2bbfa675 100644
--- a/mindspore/python/mindspore/ops/functional.py
+++ b/mindspore/python/mindspore/ops/functional.py
@@ -951,7 +951,6 @@ tensor_operator_registry.register('sum', P.ReduceSum)
 tensor_operator_registry.register('split', P.Split)
 tensor_operator_registry.register('select', P.Select)
 tensor_operator_registry.register('zeros_like', P.ZerosLike)
-tensor_operator_registry.register('space_to_batch_nd', P.SpaceToBatchND)
 tensor_operator_registry.register('batch_to_space_nd', P.BatchToSpaceND)
 tensor_operator_registry.register('one_hot', P.OneHot)
 tensor_operator_registry.register('masked_fill', masked_fill)
diff --git a/tests/st/ops/cpu/test_spacetobatchnd_op.py b/tests/st/ops/cpu/test_spacetobatchnd_op.py
index 1a81dea7675..7a0d4cd0a14 100644
--- a/tests/st/ops/cpu/test_spacetobatchnd_op.py
+++ b/tests/st/ops/cpu/test_spacetobatchnd_op.py
@@ -117,43 +117,6 @@ def test_space_to_batch_nd_function():
     np.testing.assert_array_equal(output.asnumpy(), expect)
 
 
-class SpaceToBatchNDTensorNet(nn.Cell):
-    def __init__(self, block_size=2):
-        super(SpaceToBatchNDTensorNet, self).__init__()
-        self.block_size = block_size
-
-    def construct(self, x):
-        return x.space_to_batch_nd(self.block_size, [[0, 0], [0, 0]])
-
-
-@pytest.mark.level0
-@pytest.mark.platform_x86_cpu
-@pytest.mark.env_onecard
-def test_space_to_batch_nd_tensor():
-    """
-    Feature: test SpaceToBatchND tensor interface.
-    Description: test tensor interface.
-    Expectation: the result match with numpy result
-    """
-    net = SpaceToBatchNDTensorNet(2)
-    input_x = Tensor(np.arange(16).reshape((1, 1, 4, 4)).astype(np.float32), mindspore.float32)
-    expect = np.array([[[[0, 2],
-                         [8, 10]]],
-                       [[[1, 3],
-                         [9, 11]]],
-                       [[[4, 6],
-                         [12, 14]]],
-                       [[[5, 7],
-                         [13, 15]]]]).astype(np.float32)
-
-    context.set_context(mode=context.PYNATIVE_MODE, device_target="CPU")
-    output = net(input_x)
-    assert (output.asnumpy() == expect).all()
-    context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
-    output = net(input_x)
-    assert (output.asnumpy() == expect).all()
-
-
 class SpaceToBatchNDDynamicShapeNetMS(nn.Cell):
     def __init__(self, block_size, paddings, axis=0):
         super().__init__()
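
Migration note: the functional interface that the deleted docstrings point to, mindspore.ops.space_to_batch_nd, is not touched by this patch (the remaining test_space_to_batch_nd_function test still exercises it). The sketch below shows the assumed functional replacement for the removed Tensor method; the positional (input_x, block_shape, paddings) call is inferred from the deleted docstring example and should be verified against the installed MindSpore version.

import numpy as np

import mindspore
from mindspore import Tensor, ops

block_shape = [2, 2]
paddings = [[0, 0], [0, 0]]
input_x = Tensor(np.array([[[[1, 2], [3, 4]]]]), mindspore.float32)

# Before this patch: output = input_x.space_to_batch_nd(block_shape, paddings)
# Functional form kept by this patch (assumed signature, arguments passed positionally):
output = ops.space_to_batch_nd(input_x, block_shape, paddings)
print(output.shape)  # (4, 1, 1, 1): each 2x2 spatial block is moved onto the batch axis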