!10191 update the examples of some operations.

From: @wangshuide2020
Reviewed-by: @liangchenghui, @wuxuejian
Signed-off-by: @liangchenghui
mindspore-ci-bot 2020-12-21 17:23:44 +08:00 committed by Gitee
commit 2e65c5de5c
5 changed files with 30 additions and 10 deletions

View File

@@ -343,6 +343,8 @@ class LSTMCell(Cell):
>>> c = Tensor(np.ones([1, 3, 12]).astype(np.float32))
>>> w = Tensor(np.ones([1152, 1, 1]).astype(np.float32))
>>> output, h, c, _, _ = net(input, h, c, w)
+>>> print(output.shape)
+(3, 5, 12)
"""
def __init__(self,
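
For reference, a self-contained version of the updated LSTMCell example. The constructor arguments below are assumptions chosen to reproduce the shapes in the hunk: with input_size=10, hidden_size=12, and has_bias=True, the flattened weight holds 4*12*(10+12) + 8*12 = 1152 elements, and a [3, 5, 10] batch-first input yields a (3, 5, 12) output.

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

# Assumed hyperparameters; only the tensor shapes below come from the hunk.
net = nn.LSTMCell(10, 12, has_bias=True, batch_first=True)
input = Tensor(np.ones([3, 5, 10]).astype(np.float32))
h = Tensor(np.ones([1, 3, 12]).astype(np.float32))
c = Tensor(np.ones([1, 3, 12]).astype(np.float32))
w = Tensor(np.ones([1152, 1, 1]).astype(np.float32))
output, h, c, _, _ = net(input, h, c, w)
print(output.shape)  # (3, 5, 12)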

View File

@@ -59,10 +59,10 @@ def repeat_elements(x, rep, axis=0):
Repeat elements of a tensor along an axis, like np.repeat.
Args:
-    - **x** (Tensor) - The tensor to repeat values for. Must be of type: float16,
+    x (Tensor): The tensor to repeat values for. Must be of type: float16,
        float32, int8, uint8, int16, int32, or int64.
-    - **rep** (int) - The number of times to repeat, must be positive, required.
-    - **axis** (int) - The axis along which to repeat, default 0.
+    rep (int): The number of times to repeat, must be positive, required.
+    axis (int): The axis along which to repeat, default 0.
Outputs:
One tensor with values repeated along the specified axis. If x has shape
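
The hunk above only restyles the argument documentation; a short usage sketch of repeat_elements (the import path is an assumption):

import numpy as np
import mindspore.ops as ops  # assumption: repeat_elements is exported here
from mindspore import Tensor

x = Tensor(np.array([[0, 1, 2], [3, 4, 5]]).astype(np.int32))
# rep must be a positive int; axis defaults to 0.
out = ops.repeat_elements(x, rep=2, axis=0)
print(out.shape)  # (4, 3): every row appears twice along axis 0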

View File

@@ -142,16 +142,19 @@ class AllGather(PrimitiveWithInfer):
``Ascend`` ``GPU``
Examples:
+>>> # This example should be run with two devices. Refer to the tutorial > Distributed Training on mindspore.cn.
>>> import numpy as np
>>> import mindspore.ops.operations as ops
>>> import mindspore.nn as nn
>>> from mindspore.communication import init
->>> from mindspore import Tensor
+>>> from mindspore import Tensor, context
>>>
+>>> context.set_context(mode=context.GRAPH_MODE)
>>> init()
>>> class Net(nn.Cell):
...     def __init__(self):
...         super(Net, self).__init__()
-...         self.allgather = ops.AllGather(group="nccl_world_group")
+...         self.allgather = ops.AllGather()
...
...     def construct(self, x):
...         return self.allgather(x)
@@ -160,6 +163,10 @@ class AllGather(PrimitiveWithInfer):
>>> net = Net()
>>> output = net(input_)
>>> print(output)
+[[1. 1. 1. 1. 1. 1. 1. 1.]
+ [1. 1. 1. 1. 1. 1. 1. 1.]
+ [1. 1. 1. 1. 1. 1. 1. 1.]
+ [1. 1. 1. 1. 1. 1. 1. 1.]]
"""
@prim_attr_register
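
The definition of input_ falls outside the hunk. A sketch of the elided lines, assuming each of the two devices feeds a [2, 8] tensor of ones, which is consistent with the printed [4, 8] result (AllGather concatenates the per-device inputs along the first dimension):

# Assumed shape: two devices x [2, 8] of ones -> gathered [4, 8] of ones.
input_ = Tensor(np.ones([2, 8]).astype(np.float32))
net = Net()
output = net(input_)
print(output)

Launch the script on both devices (e.g. mpirun -n 2 for the GPU/NCCL backend) as described in the linked tutorial.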
@@ -255,16 +262,18 @@ class ReduceScatter(PrimitiveWithInfer):
ValueError: If the first dimension of the input cannot be divided by the rank size.
Supported Platforms:
-``GPU``
+``Ascend`` ``GPU``
Examples:
->>> from mindspore import Tensor
+>>> # This example should be run with two devices. Refer to the tutorial > Distributed Training on mindspore.cn.
+>>> from mindspore import Tensor, context
>>> from mindspore.communication import init
>>> from mindspore.ops.operations.comm_ops import ReduceOp
>>> import mindspore.nn as nn
>>> import mindspore.ops.operations as ops
>>> import numpy as np
>>>
+>>> context.set_context(mode=context.GRAPH_MODE)
>>> init()
>>> class Net(nn.Cell):
...     def __init__(self):
@@ -278,6 +287,10 @@ class ReduceScatter(PrimitiveWithInfer):
>>> net = Net()
>>> output = net(input_)
>>> print(output)
+[[2. 2. 2. 2. 2. 2. 2. 2.]
+ [2. 2. 2. 2. 2. 2. 2. 2.]
+ [2. 2. 2. 2. 2. 2. 2. 2.]
+ [2. 2. 2. 2. 2. 2. 2. 2.]]
"""
@prim_attr_register
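
As above, the creation of input_ is outside the hunk. Assuming each of the two devices feeds an [8, 8] tensor of ones, ReduceScatter first sums across devices (1 + 1 = 2) and then splits the result evenly along the first dimension, so each device receives the [4, 8] block of 2s shown in the output:

# Assumed shape: the first dimension (8) must divide by the rank size (2).
input_ = Tensor(np.ones([8, 8]).astype(np.float32))
net = Net()
output = net(input_)
print(output)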

View File

@@ -34,7 +34,7 @@ class ControlDepend(Primitive):
This operation does not work in `PYNATIVE_MODE`.
Args:
depend_mode (int): Use 0 for a normal dependency relation and 1 for a user-defined dependency relation.
-    Default: 0.
+        Default: 0.
Inputs:
- **src** (Any) - The source input. It can be a tuple of operations output or a single operation output. We do
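
The description of the inputs continues past the hunk. For clarity, a minimal sketch of how depend_mode=0 is used; the ops here are placeholders, not the file's own example. ControlDepend takes (src, dst) and guarantees that src executes before dst:

import mindspore.nn as nn
import mindspore.ops.operations as ops

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        # depend_mode=0: a normal dependency between two operation outputs.
        self.control_depend = ops.ControlDepend(depend_mode=0)
        self.softmax = ops.Softmax()

    def construct(self, x, y):
        mul = x * y
        softmax = self.softmax(x)
        # Force `mul` to run before `softmax`.
        return self.control_depend(mul, softmax)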
@@ -102,7 +102,7 @@ class GeSwitch(PrimitiveWithInfer):
Examples:
>>> class Net(nn.Cell):
-...    def __init__(self):
+...     def __init__(self):
... super(Net, self).__init__()
... self.square = ops.Square()
... self.add = ops.TensorAdd()
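
The example is cut off by the hunk; a sketch of the typical GeSwitch pattern it belongs to (the branch logic here is an assumption, not the file's full example). GeSwitch returns a (false_output, true_output) pair, only the branch selected by the condition produces a value, and Merge picks that value up:

import numpy as np
import mindspore.nn as nn
import mindspore.ops.operations as ops
from mindspore import Tensor

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.square = ops.Square()
        self.add = ops.TensorAdd()
        self.switch = ops.GeSwitch()
        self.merge = ops.Merge()
        self.less = ops.Less()

    def construct(self, x, y):
        cond = self.less(x, y)
        false_x, true_x = self.switch(x, cond)  # route x into one branch
        add_ret = self.add(true_x, true_x)      # runs only when cond is True
        sq_ret = self.square(false_x)           # runs only when cond is False
        ret = self.merge((add_ret, sq_ret))     # (value, branch index)
        return ret[0]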

View File

@@ -350,7 +350,12 @@ class Print(PrimitiveWithInfer):
>>> x = Tensor(np.ones([2, 1]).astype(np.int32))
>>> y = Tensor(np.ones([2, 2]).astype(np.int32))
>>> net = PrintDemo()
->>> output = net(x, y)
+>>> result = net(x, y)
+Print Tensor x and Tensor y:
+[[1]
+ [1]]
+[[1 1]
+ [1 1]]
"""
@prim_attr_register
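
PrintDemo itself is defined above the excerpt; a reconstruction consistent with the printed output (the cell body is an assumption):

import numpy as np
import mindspore.nn as nn
import mindspore.ops.operations as ops
from mindspore import Tensor

class PrintDemo(nn.Cell):
    def __init__(self):
        super(PrintDemo, self).__init__()
        self.print = ops.Print()

    def construct(self, x, y):
        # Emits the header string followed by both tensors at runtime.
        self.print('Print Tensor x and Tensor y:', x, y)
        return x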