!26443 modify parallel api comments

Merge pull request !26443 from yangzhenzhang/modify-parallel-api-comments
i-robot 2021-11-18 02:29:18 +00:00 committed by Gitee
commit 1fa51c41b8
2 changed files with 9 additions and 14 deletions


@@ -1392,12 +1392,10 @@ class Cell(Cell_):
     def set_broadcast_flag(self, mode=True):
         """
-        Set the cell to data_parallel mode.
-        The cell can be accessed as an attribute using the given name.
+        Set parameter broadcast mode for this cell.
         Args:
-            mode (bool): Specifies whether the model is data_parallel. Default: True.
+            mode (bool): Specifies whether the mode is parameter broadcast. Default: True.
         """
         self.add_flags_recursive(broadcast_flag=mode)
         return self
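
For reference, a minimal usage sketch of the method this hunk documents (assuming MindSpore 1.x; nn.Dense is just a stand-in cell):

import mindspore.nn as nn

net = nn.Dense(16, 8)          # any Cell works as a stand-in here
net.set_broadcast_flag(True)   # recursively sets broadcast_flag, returns the cell itself
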
@@ -1470,7 +1468,7 @@ class Cell(Cell_):
     def set_comm_fusion(self, fusion_type, recurse=True):
         """
-        Set `comm_fusion` for all the parameters in the Net. Please refer to the description of
+        Set `comm_fusion` for all the parameters in this cell. Please refer to the description of
         `mindspore.common.parameter.comm_fusion`.
         Note:
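
For reference, a sketch of the set_comm_fusion call this docstring covers (assuming MindSpore 1.x, where Parameter exposes a comm_fusion attribute):

import mindspore.nn as nn

net = nn.Dense(16, 8)
net.set_comm_fusion(2)  # assign fusion group 2 to every parameter in the cell
for p in net.trainable_params():
    print(p.name, p.comm_fusion)  # each parameter now reports fusion group 2
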


@@ -1204,9 +1204,8 @@ def ckpt_restore_group_info(group_info_file_name):
 def build_searched_strategy(strategy_filename):
     """
     Build strategy of every parameter in network. Used in the case of distributed inference.
-    For details of merge_sliced_parameter, please check:
-    `Enabling Graph-Accounting Convergence <https://www.mindspore.cn/docs/programming_guide
-    /en/master/save_load_model_hybrid_parallel.html>`_.
+    For details of it, please check:
+    `<https://www.mindspore.cn/docs/programming_guide/en/master/save_load_model_hybrid_parallel.html>`.
     Args:
         strategy_filename (str): Name of strategy file.
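
For reference, a usage sketch of build_searched_strategy; "strategy.ckpt" is a placeholder for a strategy file produced by auto-parallel training:

from mindspore.train.serialization import build_searched_strategy

strategy = build_searched_strategy("strategy.ckpt")  # placeholder path, must exist
print(list(strategy.keys()))                         # parameter names covered by the strategy
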
@@ -1252,12 +1251,11 @@ def build_searched_strategy(strategy_filename):
 def merge_sliced_parameter(sliced_parameters, strategy=None):
     """
     Merge parameter slices into one parameter. Used in the case of distributed inference.
-    For details of merge_sliced_parameter, please check:
-    `Enabling Graph-Accounting Convergence <https://www.mindspore.cn/docs/programming_guide
-    /en/master/save_load_model_hybrid_parallel.html>`_.
+    For details of it, please check:
+    `<https://www.mindspore.cn/docs/programming_guide/en/master/save_load_model_hybrid_parallel.html>`.
     Args:
-        sliced_parameters (list[Parameter]): Parameter slices in order of rank_id.
+        sliced_parameters (list[Parameter]): Parameter slices in order of rank id.
         strategy (Optional[dict]): Parameter slice strategy, whose key is parameter name and
             value is slice strategy of this parameter. If strategy is None, just merge
             parameter slices in 0 axis order. Default: None.
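
For reference, a minimal sketch of merge_sliced_parameter with strategy=None, where slices are concatenated in axis-0 order; the slice values here are fabricated for illustration:

import numpy as np
from mindspore import Tensor, Parameter
from mindspore.train.serialization import merge_sliced_parameter

# Two fabricated slices of one weight, listed in rank order; all slices
# must share the same parameter name.
slices = [Parameter(Tensor(np.full((2, 4), r, np.float32)), name="weight")
          for r in range(2)]
merged = merge_sliced_parameter(slices)  # strategy=None: concatenate along axis 0
print(merged.data.shape)                 # (4, 4)
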
@@ -1341,8 +1339,7 @@ def load_distributed_checkpoint(network, checkpoint_filenames, predict_strategy=
     """
     Load checkpoint into net for distributed predication. Used in the case of distributed inference.
     For details of distributed inference, please check:
-    `Enabling Graph-Accounting Convergence <https://www.mindspore.cn/docs/programming_guide
-    /en/master/distributed_inference.html>`_.
+    `<https://www.mindspore.cn/docs/programming_guide/en/master/distributed_inference.html>`.
     Args:
         network (Cell): Network for distributed predication.
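
For reference, a hedged sketch of calling load_distributed_checkpoint; the file names and network are placeholders, the rank checkpoints must exist on disk, and a configured distributed inference context is assumed:

import mindspore.nn as nn
from mindspore.train.serialization import load_distributed_checkpoint

ckpt_files = ["rank_0.ckpt", "rank_1.ckpt"]  # one checkpoint per training rank, in rank order
net = nn.Dense(16, 8)                        # stand-in for the real inference network
load_distributed_checkpoint(net, ckpt_files, predict_strategy=None)
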