forked from mindspore-Ecosystem/mindspore
!469 add comment for export lite model
Merge pull request !469 from yangjie159/add-comment-of-export-lite-model
This commit is contained in:
commit
b554a86832
@@ -487,8 +487,8 @@ def set_context(**kwargs):
         enable_loop_sink (bool): Whether to enable loop sink. Default: False.
         enable_task_sink (bool): Whether to enable task sink. Default: True.
         enable_mem_reuse (bool): Whether to enable memory reuse. Default: True.
-        save_ms_model (bool): Whether to save model converted by graph. Default: False.
-        save_ms_model_path (str): Path to save converted model. Default: "."
+        save_ms_model (bool): Whether to save lite model converted by graph. Default: False.
+        save_ms_model_path (str): Path to save converted lite model. Default: "."
         enable_gpu_summary (bool): Whether to enable gpu summary. Default: True.
         save_graphs_path (str): Path to save graphs. Default: "."
         enable_auto_mixed_precision (bool): Whether to enable auto mixed precision. Default: True.
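The two rewritten lines above document save_ms_model and save_ms_model_path as lite-model options of set_context. A minimal usage sketch follows, assuming the context.set_context API of the MindSpore version this commit targets; GRAPH_MODE and the output directory are illustrative choices, not taken from the diff.

# Minimal sketch: with save_ms_model enabled, the compiled graph is additionally
# saved as a lite model under save_ms_model_path (per the docstring above).
from mindspore import context

context.set_context(mode=context.GRAPH_MODE,          # graph mode, illustrative choice
                    save_ms_model=True,               # dump the lite model converted from the graph
                    save_ms_model_path="./ms_model")  # directory for the converted lite model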
@@ -426,7 +426,7 @@ def export(net, *inputs, file_name, file_format='GEIR'):
         - GEIR: Graph Engine Intermediate Representation. An intermediate representation format of
           Ascend model.
         - ONNX: Open Neural Network eXchange. An open format built to represent machine learning models.
-        - LITE: Huawei model format for mobile.
+        - LITE: Huawei model format for mobile. A lite model only for the MindSpore Lite
     """
     logger.info("exporting model file:%s format:%s.", file_name, file_format)
     check_input_data(*inputs, data_class=Tensor)
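For the LITE format documented above, a hedged example of the export call: the nn.Dense network, the input shape, and the output file name are placeholders, and the import path assumes mindspore.train.serialization as in MindSpore releases of this period.

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.train.serialization import export

# Placeholder network; any trained Cell can be exported the same way.
net = nn.Dense(16, 4)
dummy_input = Tensor(np.ones([1, 16]).astype(np.float32))  # illustrative input shape

# file_format='LITE' produces a lite model intended for the MindSpore Lite runtime on mobile.
export(net, dummy_input, file_name="dense_net", file_format='LITE')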