diff --git a/mindspore/nn/layer/lstm.py b/mindspore/nn/layer/lstm.py
index c738b95a253..9c988560494 100755
--- a/mindspore/nn/layer/lstm.py
+++ b/mindspore/nn/layer/lstm.py
@@ -343,6 +343,8 @@ class LSTMCell(Cell):
         >>> c = Tensor(np.ones([1, 3, 12]).astype(np.float32))
         >>> w = Tensor(np.ones([1152, 1, 1]).astype(np.float32))
         >>> output, h, c, _, _ = net(input, h, c, w)
+        >>> print(output.shape)
+        (3, 5, 12)
     """
 
     def __init__(self,
diff --git a/mindspore/ops/composite/array_ops.py b/mindspore/ops/composite/array_ops.py
index cfc00546d43..556528571e9 100644
--- a/mindspore/ops/composite/array_ops.py
+++ b/mindspore/ops/composite/array_ops.py
@@ -59,10 +59,10 @@ def repeat_elements(x, rep, axis=0):
     Repeat elements of a tensor along an axis, like np.repeat.
 
     Args:
-        - **x** (Tensor) - The tensor to repeat values for. Must be of type: float16,
+        x (Tensor): The tensor to repeat values for. Must be of type: float16,
            float32, int8, uint8, int16, int32, or int64.
-        - **rep** (int) - The number of times to repeat, must be positive, required.
-        - **axis** (int) - The axis along which to repeat, default 0.
+        rep (int): The number of times to repeat, must be positive, required.
+        axis (int): The axis along which to repeat, default 0.
 
     Outputs:
         One tensor with values repeated along the specified axis. If x has shape
diff --git a/mindspore/ops/operations/comm_ops.py b/mindspore/ops/operations/comm_ops.py
index c7e46fdf53c..6c4d36d82e0 100644
--- a/mindspore/ops/operations/comm_ops.py
+++ b/mindspore/ops/operations/comm_ops.py
@@ -142,16 +142,19 @@ class AllGather(PrimitiveWithInfer):
         ``Ascend`` ``GPU``
 
     Examples:
+        >>> # This example should be run with two devices. Refer to the tutorial > Distributed Training on mindspore.cn.
+        >>> import numpy as np
         >>> import mindspore.ops.operations as ops
         >>> import mindspore.nn as nn
         >>> from mindspore.communication import init
-        >>> from mindspore import Tensor
+        >>> from mindspore import Tensor, context
         >>>
+        >>> context.set_context(mode=context.GRAPH_MODE)
         >>> init()
         ... class Net(nn.Cell):
         ...     def __init__(self):
         ...         super(Net, self).__init__()
-        ...         self.allgather = ops.AllGather(group="nccl_world_group")
+        ...         self.allgather = ops.AllGather()
         ...
         ...     def construct(self, x):
         ...         return self.allgather(x)
@@ -160,6 +163,10 @@ class AllGather(PrimitiveWithInfer):
         >>> net = Net()
         >>> output = net(input_)
         >>> print(output)
+        [[1. 1. 1. 1. 1. 1. 1. 1.]
+         [1. 1. 1. 1. 1. 1. 1. 1.]
+         [1. 1. 1. 1. 1. 1. 1. 1.]
+         [1. 1. 1. 1. 1. 1. 1. 1.]]
     """
 
     @prim_attr_register
@@ -255,16 +262,18 @@ class ReduceScatter(PrimitiveWithInfer):
         ValueError: If the first dimension of the input cannot be divided by the rank size.
 
     Supported Platforms:
-        ``GPU``
+        ``Ascend`` ``GPU``
 
     Examples:
-        >>> from mindspore import Tensor
+        >>> # This example should be run with two devices. Refer to the tutorial > Distributed Training on mindspore.cn.
+        >>> from mindspore import Tensor, context
         >>> from mindspore.communication import init
         >>> from mindspore.ops.operations.comm_ops import ReduceOp
         >>> import mindspore.nn as nn
         >>> import mindspore.ops.operations as ops
         >>> import numpy as np
         >>>
+        >>> context.set_context(mode=context.GRAPH_MODE)
         >>> init()
         >>> class Net(nn.Cell):
         ...     def __init__(self):
@@ -278,6 +287,10 @@ class ReduceScatter(PrimitiveWithInfer):
         >>> net = Net()
         >>> output = net(input_)
         >>> print(output)
+        [[2. 2. 2. 2. 2. 2. 2. 2.]
+         [2. 2. 2. 2. 2. 2. 2. 2.]
+         [2. 2. 2. 2. 2. 2. 2. 2.]
+         [2. 2. 2. 2. 2. 2. 2. 2.]]
     """
 
     @prim_attr_register
diff --git a/mindspore/ops/operations/control_ops.py b/mindspore/ops/operations/control_ops.py
index d7eb00c6e1e..053f4eea5c9 100644
--- a/mindspore/ops/operations/control_ops.py
+++ b/mindspore/ops/operations/control_ops.py
@@ -34,7 +34,7 @@ class ControlDepend(Primitive):
         This operation does not work in `PYNATIVE_MODE`.
     Args:
         depend_mode (int): Use 0 for a normal dependency relation and 1 for a user-defined dependency relation.
-        Default: 0.
+            Default: 0.
 
     Inputs:
         - **src** (Any) - The source input. It can be a tuple of operations output or a single operation output. We do
@@ -102,7 +102,7 @@ class GeSwitch(PrimitiveWithInfer):
 
     Examples:
         >>> class Net(nn.Cell):
-        ...    def __init__(self):
+        ...     def __init__(self):
         ...         super(Net, self).__init__()
         ...         self.square = ops.Square()
         ...         self.add = ops.TensorAdd()
diff --git a/mindspore/ops/operations/debug_ops.py b/mindspore/ops/operations/debug_ops.py
index f10291df510..f575b47377b 100644
--- a/mindspore/ops/operations/debug_ops.py
+++ b/mindspore/ops/operations/debug_ops.py
@@ -350,7 +350,12 @@ class Print(PrimitiveWithInfer):
         >>> x = Tensor(np.ones([2, 1]).astype(np.int32))
         >>> y = Tensor(np.ones([2, 2]).astype(np.int32))
         >>> net = PrintDemo()
-        >>> output = net(x, y)
+        >>> result = net(x, y)
+        Print Tensor x and Tensor y:
+        [[1]
+         [1]]
+        [[1 1]
+         [1 1]]
     """
 
     @prim_attr_register