Fix the docs for their format problems

This commit is contained in:
dinglinhe 2021-09-03 11:11:09 +08:00
parent 54cd78d25c
commit f6f445a30a
3 changed files with 6 additions and 8 deletions

View File

@ -175,8 +175,7 @@ class FixedLossScaleUpdateCell(Cell):
Examples: Examples:
>>> import numpy as np >>> import numpy as np
>>> from mindspore import Tensor, Parameter, nn >>> from mindspore import Tensor, Parameter, nn, ops
>>> from mindspore.ops as ops
>>> >>>
>>> class Net(nn.Cell): >>> class Net(nn.Cell):
... def __init__(self, in_features, out_features): ... def __init__(self, in_features, out_features):
@ -248,8 +247,7 @@ class TrainOneStepWithLossScaleCell(TrainOneStepCell):
Examples: Examples:
>>> import numpy as np >>> import numpy as np
>>> from mindspore import Tensor, Parameter, nn >>> from mindspore import Tensor, Parameter, nn, ops
>>> from mindspore.ops as ops
>>> from mindspore import dtype as mstype >>> from mindspore import dtype as mstype
>>> >>>
>>> class Net(nn.Cell): >>> class Net(nn.Cell):

View File

@ -143,7 +143,7 @@ class Primitive(Primitive_):
Args: Args:
stage (int): The stage id for the current operation. stage (int): The stage id for the current operation.
Examples: Examples:
>>> from mindspore.ops as ops >>> from mindspore import ops
>>> add = ops.Add() >>> add = ops.Add()
>>> print(add.set_stage(0)) >>> print(add.set_stage(0))
Prim[Add]<stage=0> Prim[Add]<stage=0>
@ -162,7 +162,7 @@ class Primitive(Primitive_):
Args: Args:
strategy (tuple): Strategy describes the distributed parallel mode of the current primitive. strategy (tuple): Strategy describes the distributed parallel mode of the current primitive.
Examples: Examples:
>>> from mindspore.ops as ops >>> from mindspore import ops
>>> add = ops.Add() >>> add = ops.Add()
>>> print(add.shard(((1, 1), (1, 1)))) >>> print(add.shard(((1, 1), (1, 1))))
Prim[Add]<strategy=((1, 1), (1, 1))> Prim[Add]<strategy=((1, 1), (1, 1))>