diff --git a/model_zoo/official/nlp/pangu_alpha/README.md b/model_zoo/official/nlp/pangu_alpha/README.md
index 90f820200d4..6937e1eac4c 100644
--- a/model_zoo/official/nlp/pangu_alpha/README.md
+++ b/model_zoo/official/nlp/pangu_alpha/README.md
@@ -120,7 +120,7 @@ The vocab size of `vocab.vocab` is 40000, and the `eod id` is 6.
 
 ### Training On Ascend
 
-Currently the scripts provide three default configures : `2.6B` `13B` and `200B`. The following command will start training `2.6B` model on 8 **Ascend cards**.
+Currently the scripts provide three default configurations: `2.6B`, `13B`, and `200B`.
 
 ```bash
@@ -465,7 +465,7 @@ Please check the official [homepage](https://gitee.com/mindspore/mindspore/tree/
 
 # [Requirements](#contents)
 
-- mindspore 1.2.1 or higher version
+- mindspore 1.3.0
 - jieba 0.42.1
 - sentencepiece 0.1.94
 - transformers >= 4.7.0
diff --git a/model_zoo/official/nlp/pangu_alpha/train.py b/model_zoo/official/nlp/pangu_alpha/train.py
index 7834cd682a6..bca1e00ac19 100644
--- a/model_zoo/official/nlp/pangu_alpha/train.py
+++ b/model_zoo/official/nlp/pangu_alpha/train.py
@@ -83,7 +83,7 @@ def run_train(args_opt):
     # Set execution mode
     context.set_context(mode=context.GRAPH_MODE,
                         device_target=args_opt.device_target)
-    context.set_context(variable_memory_max_size="30GB")
+    context.set_context(variable_memory_max_size="31GB")
     # Set parallel context
     if args_opt.distribute == "true":
         D.init()
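
For context on the train.py hunk: `variable_memory_max_size` caps the device memory MindSpore reserves for network variables, and this patch raises it from "30GB" to "31GB". Below is a minimal sketch of how that setting is applied, assuming MindSpore 1.3.0 with an Ascend device available; `configure_context` is a hypothetical helper name, not part of the PanGu-Alpha scripts, and the two `set_context` calls mirror `run_train` in train.py.

```python
# Minimal sketch, assuming MindSpore 1.3.0 on an Ascend target.
# `configure_context` is a hypothetical wrapper, not from the PanGu-Alpha repo.
from mindspore import context

def configure_context(device_target="Ascend"):
    # Run in graph mode, as run_train() in train.py does.
    context.set_context(mode=context.GRAPH_MODE, device_target=device_target)
    # Reserve device memory for network variables; this patch raises the
    # budget from "30GB" to "31GB" (the parameter takes a "<n>GB" string).
    context.set_context(variable_memory_max_size="31GB")

if __name__ == "__main__":
    configure_context()
```

Note that `variable_memory_max_size` only takes effect on Ascend targets, which is consistent with the README bump to mindspore 1.3.0 in the first file of this patch.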