Fix document

This commit is contained in:
huangxinjing 2021-07-08 17:09:08 +08:00
parent 1ac696d044
commit 1fd90d1f37
2 changed files with 11 additions and 2 deletions

View File

@ -291,7 +291,7 @@ class DistributedGradReducer(Cell):
ValueError: If degree is not an int or is less than 0.
Supported Platforms:
``Ascend``, ``GPU``
``Ascend`` ``GPU``
Examples:
>>> # This example should be run with multiple processes.

View File

@ -140,9 +140,13 @@ class Primitive(Primitive_):
Note:
It is valid only in semi auto parallel.
In other parallel modes, please set it to 0.
Args:
stage (int): The stage id for the current operation.
Example:
>>> from mindspore.ops import operations as P
>>> add = P.Add()
>>> print(add.set_stage(0))
Prim[Add]<stage=0>
"""
self.add_prim_attr("stage", stage)
return self
@ -157,6 +161,11 @@ class Primitive(Primitive_):
Args:
strategy (tuple): Strategy describes the distributed parallel mode of the current primitive.
Example:
>>> from mindspore.ops import operations as P
>>> add = P.Add()
>>> print(add.shard(((1, 1), (1, 1))))
Prim[Add]<strategy=((1, 1), (1, 1))>
"""
mode = context.get_auto_parallel_context("parallel_mode")
if strategy is not None: