!192 Modify API names Stack -> Pack, Unstack -> Unpack in ME.

Merge pull request !192 from liuxiao/temp
mindspore-ci-bot 2020-04-09 20:08:42 +08:00 committed by Gitee
commit 50cb2ad944
5 changed files with 64 additions and 72 deletions

View File

@@ -148,8 +148,8 @@ const char kNameSlice[] = "Slice";
const char kNameAddN[] = "AddN";
const char kNameLess[] = "Less";
const char kNameGreater[] = "Greater";
const char kNameStack[] = "Stack";
const char kNameUnstack[] = "Unstack";
const char kNamePack[] = "Pack";
const char kNameUnpack[] = "Unpack";
const char kNameMerge[] = "Merge";
const char kNameGeSwitch[] = "GeSwitch";
@@ -202,8 +202,8 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_ma
{string(kNameAvgPool), ADPT_DESC(AvgPool)},
{string(kNameMaxPoolWithArgmax), ADPT_DESC(MaxPoolWithArgmax)},
{string(kNameTopK), ADPT_DESC(TopKV2)},
{string(kNameStack), ADPT_DESC(Pack)},
{string(kNameUnstack), ADPT_DESC(Unpack)},
{string(kNamePack), ADPT_DESC(Pack)},
{string(kNameUnpack), ADPT_DESC(Unpack)},
{string(kNameSplitD), ADPT_DESC(SplitD)},
{string(kNameAllReduce), ADPT_DESC(HcomAllReduce)},
{string(kNameBroadcast), ADPT_DESC(HcomBroadcast)},

View File

@@ -266,26 +266,26 @@ def get_bprop_gather_v2(self):
return bprop
@bprop_getters.register(P.Stack)
def get_bprop_stack(self):
"""Generate bprop for Stack"""
@bprop_getters.register(P.Pack)
def get_bprop_pack(self):
"""Generate bprop for Pack"""
axis = self.axis
def bprop(x, out, dout):
stack_grad = P.Unstack(axis)
out = stack_grad(dout)
pack_grad = P.Unpack(axis)
out = pack_grad(dout)
return (out,)
return bprop
@bprop_getters.register(P.Unstack)
def get_bprop_unstack(self):
"""Generate bprop for Unstack"""
@bprop_getters.register(P.Unpack)
def get_bprop_unpack(self):
"""Generate bprop for Unpack"""
axis = self.axis
def bprop(x, out, dout):
unstack_grad = P.Stack(axis)
out = unstack_grad(dout)
unpack_grad = P.Pack(axis)
out = unpack_grad(dout)
return (out,)
return bprop
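The two bprops above are mirror images: the gradient of packing is unpacking along the same axis, and vice versa. A minimal NumPy sketch (illustrative only, not MindSpore code) of why splitting the incoming gradient recovers each input's gradient:

import numpy as np

# Forward: pack two shape-(2,) arrays along axis 0 -> shape (2, 2).
x1 = np.array([0., 1.], dtype=np.float32)
x2 = np.array([2., 3.], dtype=np.float32)
out = np.stack([x1, x2], axis=0)

# Backward: the upstream gradient dout has the packed shape (2, 2).
# Each input contributed exactly one slice along axis 0, so its
# gradient is the matching slice of dout -- i.e. unpack(dout, axis).
dout = np.ones_like(out)
dx1, dx2 = [np.squeeze(s, axis=0) for s in np.split(dout, 2, axis=0)]
assert dx1.shape == x1.shape and dx2.shape == x2.shape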

View File

@@ -19,7 +19,7 @@ Primitive operator classes.
A collection of operators to build neural networks or computing functions.
"""
from .array_ops import (Argmax, Argmin, Cast, ConcatOffset, Concat, Stack, Unstack,
from .array_ops import (Argmax, Argmin, Cast, ConcatOffset, Concat, Pack, Unpack,
Diag, DiagPart, DType, ExpandDims, Eye,
Fill, GatherNd, GatherV2, InvertPermutation,
IsInstance, IsSubClass, ArgMaxWithValue, OnesLike, ZerosLike,
@@ -112,8 +112,8 @@ __all__ = [
'OneHot',
'GatherV2',
'Concat',
'Stack',
'Unstack',
'Pack',
'Unpack',
'Tile',
'BiasAdd',
'Gelu',

View File

@@ -1350,8 +1350,8 @@ class Concat(PrimitiveWithInfer):
return out
def _get_stack_shape(x_shape, x_type, axis):
"""for satck output shape"""
def _get_pack_shape(x_shape, x_type, axis):
"""for pack output shape"""
validator.check_type("shape", x_shape, [tuple])
validator.check_integer("len of input_x shape", len(x_shape), 0, Rel.GT)
validator.check_subclass("shape0", x_type[0], mstype.tensor)
@@ -1368,43 +1368,40 @@ def _get_stack_shape(x_shape, x_type, axis):
validator.check('x_type[%d]' % i, x_type[i], 'base', x_type[0])
for j in range(rank_base):
if v[j] != x_shape[0][j]:
raise ValueError("Stack evaluator element %d shape in input can not stack with first element" % i)
raise ValueError("Pack evaluator element %d shape in input can not pack with first element" % i)
out_shape.insert(axis, N)
return out_shape
class Stack(PrimitiveWithInfer):
class Pack(PrimitiveWithInfer):
r"""
Stacks a list of rank-`R` tensors into one rank-`(R+1)` tensor.
Packs a list of tensors along the specified axis.
Packs the list of tensors in `input_x` into a tensor with rank one higher than
each tensor in `input_x`, by packing them along the `axis` dimension.
Given a list of length `N` of tensors of shape `(A, B, C)`;
Packs the list of input tensors, each with the same rank `R`; the output is a tensor of rank `(R+1)`.
If `axis == 0` then the `output` tensor will have the shape `(N, A, B, C)`.
If `axis == 1` then the `output` tensor will have the shape `(A, N, B, C)`. Etc.
Given `N` input tensors, each of shape :math:`(x_1, x_2, ..., x_R)`,
if :math:`0 \le axis`, the output tensor shape is :math:`(x_1, x_2, ..., x_{axis}, N, x_{axis+1}, ..., x_R)`.
Args:
axis (int): The axis to stack along. Negative values wrap around,
so the valid range is [-(R+1), R+1). Default: 0.
axis (int): Dimension along which to pack. Default: 0.
Negative values wrap around. The range is [-(R+1), R+1).
Inputs:
- **input_x** (Union[tuple, list]) - A Tuple or list of Tensor objects with the same shape and type.
Outputs:
Tensor. A stacked Tensor with the same type as values.
Tensor. A packed Tensor with the same type as `input_x`.
Examples:
>>> data1 = Tensor(np.array([0, 1]).astype(np.float32))
>>> data2 = Tensor(np.array([2, 3]).astype(np.float32))
>>> op = P.Stack()
>>> output = op([data1, data2])
>>> pack = P.Pack()
>>> output = pack([data1, data2])
[[0, 1], [2, 3]]
"""
@prim_attr_register
def __init__(self, axis=0):
"""init Stack"""
"""init Pack"""
self.__setattr_flag__ = True
validator.check_type("axis", axis, [int])
self.axis = axis
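The shape rule in the new docstring matches NumPy's np.stack, which makes a convenient reference for the axis semantics (illustrative sketch, not part of the diff):

import numpy as np

# Three inputs of shape (A, B) = (2, 3), so N = 3.
inputs = [np.zeros((2, 3), dtype=np.float32) for _ in range(3)]

print(np.stack(inputs, axis=0).shape)   # (3, 2, 3): N inserted at axis 0
print(np.stack(inputs, axis=1).shape)   # (2, 3, 3): N inserted at axis 1
print(np.stack(inputs, axis=-1).shape)  # (2, 3, 3): negative axis wraps around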
@@ -1413,38 +1410,33 @@ class Stack(PrimitiveWithInfer):
x_shape = value['shape']
x_type = value['dtype']
self.add_prim_attr('num', len(x_shape))
all_shape = _get_stack_shape(x_shape, x_type, self.axis)
all_shape = _get_pack_shape(x_shape, x_type, self.axis)
out = {'shape': all_shape,
'dtype': x_type[0],
'value': None}
return out
class Unstack(PrimitiveWithInfer):
class Unpack(PrimitiveWithInfer):
r"""
Unpacks the given dimension of a rank-`R` tensor into rank-`(R-1)` tensors.
Unpacks a tensor along the specified axis.
Unpacks num tensors from value by chipping it along the axis dimension.
If num is not specified (the default), it is inferred from value's shape.
If value.shape[axis] is not known, ValueError is raised.
Unpacks a tensor of rank `R` along the axis dimension; the output tensors have rank `(R-1)`.
For example, given a tensor of shape (A, B, C, D);
Given a tensor of shape :math:`(x_1, x_2, ..., x_R)`, if :math:`0 \le axis`,
the shape of each output tensor is :math:`(x_1, x_2, ..., x_{axis}, x_{axis+2}, ..., x_R)`.
If axis == 0 then the i'th tensor in output is the slice value[i, :, :, :] and
each tensor in output will have shape (B, C, D). (Note that the dimension unpacked along is gone, unlike split).
If axis == 1 then the i'th tensor in output is the slice value[:, i, :, :] and
each tensor in output will have shape (A, C, D). Etc.
This is the opposite of stack.
This is the opposite of pack.
Args:
axis (int): The axis to unstack along. Defaults to the first dimension.
Negative values wrap around, so the valid range is [-R, R).
axis (int): Dimension along which to unpack. Default: 0.
Negative values wrap around. The range is [-R, R).
num (int): The number of tensors to be unpacked. Default: None.
If `num` is not specified, it is inferred from the shape of `input_x`.
Inputs:
- **input_x** (Tensor) - The shape is :math:`(x_1, x_2, ..., x_R)`.
A rank R > 0 Tensor to be unstacked.
A rank R > 0 Tensor to be unpacked.
Outputs:
A tuple of Tensors, each with the same shape.
@@ -1454,15 +1446,15 @@ class Unstack(PrimitiveWithInfer):
or if len(input_x.shape[axis]) is not equal to num.
Examples:
>>> unstack = P.Unstack()
>>> x = Tensor(np.array([[1, 1, 1, 1], [2, 2, 2, 2]]))
>>> output = unstack(x)
>>> unpack = P.Unpack()
>>> input_x = Tensor(np.array([[1, 1, 1, 1], [2, 2, 2, 2]]))
>>> output = unpack(input_x)
([1, 1, 1, 1], [2, 2, 2, 2])
"""
@prim_attr_register
def __init__(self, axis=0):
"""init Unstack"""
"""init Unpack"""
self.__setattr_flag__ = True
validator.check_type("axis", axis, [int])
self.axis = axis
@@ -1479,7 +1471,7 @@ class Unstack(PrimitiveWithInfer):
validator.check_integer("output_num", output_num, 0, Rel.GT)
self.add_prim_attr('num', output_num)
output_valid_check = x_shape[self.axis] - output_num
validator.check_integer("the dimension which to unstack divides output_num", output_valid_check, 0, Rel.EQ)
validator.check_integer("The dimension which to unpack divides output_num", output_valid_check, 0, Rel.EQ)
out_shapes = []
out_dtypes = []
out_shape = x_shape[:self.axis] + x_shape[self.axis + 1:]
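NumPy again gives a compact reference: unpacking removes the axis dimension from every output, exactly the out_shape computation above (illustrative sketch, not part of the diff):

import numpy as np

x = np.array([[1, 1, 1, 1], [2, 2, 2, 2]], dtype=np.float32)  # shape (2, 4)

# Unpack along axis 0 -> x.shape[0] == 2 outputs, each of shape (4,),
# mirroring out_shape = x_shape[:axis] + x_shape[axis + 1:].
outputs = [np.take(x, i, axis=0) for i in range(x.shape[0])]
assert [o.shape for o in outputs] == [(4,), (4,)]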

View File

@@ -80,9 +80,9 @@ class NetForConcat1(nn.Cell):
return self.concat((x1, x2))
class NetForStackInput(nn.Cell):
class NetForPackInput(nn.Cell):
def __init__(self, op):
super(NetForStackInput, self).__init__()
super(NetForPackInput, self).__init__()
self.op = op
self.mul = P.Mul()
@@ -93,9 +93,9 @@ class NetForStackInput(nn.Cell):
return self.op(t)
class NetForUnstackInput(nn.Cell):
class NetForUnpackInput(nn.Cell):
def __init__(self, op):
super(NetForUnstackInput, self).__init__()
super(NetForUnpackInput, self).__init__()
self.op = op
self.mul = P.Mul()
@@ -991,33 +991,33 @@ test_case_array_ops = [
Tensor(np.array([1], np.float32)),
Tensor(np.array([1], np.float32)))],
'desc_bprop': [[3,]]}),
('StackV2_0', {
'block': NetForStackInput(P.Stack()),
('Pack_0', {
'block': NetForPackInput(P.Pack()),
'desc_inputs':[[2, 2], [2, 2], [2, 2]],
'desc_bprop':[[3, 2, 2]],
}),
('StackV2_1', {
'block': NetForStackInput(P.Stack(axis=-2)),
('Pack_1', {
'block': NetForPackInput(P.Pack(axis=-2)),
'desc_inputs':[[3, 2, 3], [3, 2, 3], [3, 2, 3]],
'desc_bprop':[[3, 2, 3, 3]],
}),
('StackV2_2', {
'block': NetForStackInput(P.Stack()),
('Pack_2', {
'block': NetForPackInput(P.Pack()),
'desc_inputs':[[2, 2]],
'desc_bprop':[[2, 2, 2]],
}),
('StackV2_3', {
'block': NetForStackInput(P.Stack()),
('Pack_3', {
'block': NetForPackInput(P.Pack()),
'desc_inputs':[[128, 128], [128, 128]],
'desc_bprop':[[2, 128, 128]],
}),
('UnstackV2_0', {
'block': NetForUnstackInput(P.Unstack(axis=0)),
('Unpack_0', {
'block': NetForUnpackInput(P.Unpack(axis=0)),
'desc_inputs':[[2, 4]],
'desc_bprop':[[4], [4]],
}),
('UnstackV2_1', {
'block': NetForUnstackInput(P.Unstack(axis=-1)),
('Unpack_1', {
'block': NetForUnpackInput(P.Unpack(axis=-1)),
'desc_inputs':[Tensor(np.array([[1, 1, 1]], np.float32))],
'desc_bprop':[[1], [1], [1]],
}),
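For readers who want to try the renamed ops outside the test harness, a minimal sketch assuming a MindSpore build that includes this change (imports follow the 0.x-era conventions used in these files):

import numpy as np
from mindspore import Tensor
from mindspore.ops import operations as P

x1 = Tensor(np.array([0, 1]).astype(np.float32))
x2 = Tensor(np.array([2, 3]).astype(np.float32))

pack = P.Pack(axis=0)
packed = pack((x1, x2))   # shape (2, 2), as in the Pack docstring example

unpack = P.Unpack(axis=0)
parts = unpack(packed)    # tuple of two shape-(2,) tensors; Unpack inverts Pack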