This commit is contained in:
huchunmei 2021-07-03 17:33:47 +08:00
parent 28991939bc
commit 96955632f2
11 changed files with 18 additions and 16 deletions

View File

@@ -33,7 +33,7 @@ global_step: 0
 # MindSpore LSTM Example - train.py
 preprocess: 'false'
-aclimdb_path: "/cache/data/aclImdb"
+aclimdb_path: "./aclImdb"
 glove_path: "/cache/data"
 preprocess_path: "/cache/train/preprocess"
 ckpt_path: './ckpt_lstm/'

View File

@@ -34,7 +34,7 @@ global_step: 0
 # MindSpore LSTM Example - train.py
 preprocess: 'false'
-aclimdb_path: "/cache/data/aclImdb"
+aclimdb_path: "./aclImdb"
 glove_path: "/cache/data"
 preprocess_path: "/cache/train/preprocess"
 ckpt_path: './ckpt_lstm/'

View File

@@ -28,7 +28,7 @@ keep_checkpoint_max: 10
 # MindSpore LSTM Example - train.py
 preprocess: 'false'
-aclimdb_path: "/cache/data/aclImdb"
+aclimdb_path: "./aclImdb"
 glove_path: "/cache/data"
 preprocess_path: "/cache/train/preprocess"
 ckpt_path: './ckpt_lstm/'
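
Note: all three YAML hunks make the same change -- aclimdb_path moves from the ModelArts data cache (/cache/data/aclImdb) to a path relative to the working directory (./aclImdb). The sketch below only illustrates how such a relative path resolves once the config is loaded; the load_config helper and the "config_cpu.yaml" file name are assumptions, not names from this repository.

    # Illustrative only: how "./aclImdb" resolves after loading the config.
    # load_config and "config_cpu.yaml" are assumed names, not the repo's API.
    import os
    import yaml

    def load_config(path="config_cpu.yaml"):
        with open(path, "r") as f:
            cfg = yaml.safe_load(f)
        # A relative aclimdb_path is resolved against the directory the
        # training/eval script is launched from, not the /cache mount.
        cfg["aclimdb_path"] = os.path.abspath(cfg["aclimdb_path"])
        return cfg

    # e.g. launching from /home/user/lstm yields /home/user/lstm/aclImdb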

View File

@@ -39,6 +39,6 @@ CONFIG_FILE="${BASE_PATH}/../../config_ascend.yaml"
 python ../../eval.py \
 --config_path=$CONFIG_FILE \
 --device_target="Ascend" \
---preprocess=true \
---glove_path=$PREPROCESS_DIR \
+--preprocess=false \
+--preprocess_path=$PREPROCESS_DIR \
 --ckpt_file=$CKPT_FILE > log.txt 2>&1 &

View File

@@ -37,6 +37,6 @@ python ../eval.py \
 --device_target="CPU" \
 --aclimdb_path=$ACLIMDB_DIR \
 --glove_path=$GLOVE_DIR \
---preprocess=true \
+--preprocess=false \
 --preprocess_path=./preprocess \
 --ckpt_file=$CKPT_FILE > log.txt 2>&1 &

View File

@@ -40,6 +40,6 @@ python ../eval.py \
 --device_target="GPU" \
 --aclimdb_path=$ACLIMDB_DIR \
 --glove_path=$GLOVE_DIR \
---preprocess=true \
+--preprocess=false \
 --preprocess_path=./preprocess \
 --ckpt_file=$CKPT_FILE > log.txt 2>&1 &
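
Note: the three eval scripts now pass --preprocess=false, so eval.py reads the already-converted MindRecord data from --preprocess_path instead of rebuilding it from the raw aclImdb text. The sketch below is a minimal illustration of the control flow this assumes; the helper is modeled loosely on the MindSpore LSTM example and its name and signature are assumptions here.

    # Illustrative sketch of the switch the eval scripts now rely on.
    import os

    def resolve_eval_data(preprocess, preprocess_path):
        """Return the directory of MindRecord files that eval.py should read."""
        if preprocess == "true":
            # With --preprocess=true, eval.py would rebuild the MindRecord
            # files from aclimdb_path and glove_path before evaluating
            # (convert_to_mindrecord in the LSTM example).
            raise NotImplementedError("conversion path not sketched here")
        # With --preprocess=false, evaluation expects an earlier preprocess or
        # training run to have left the converted files in --preprocess_path.
        if not os.path.isdir(preprocess_path):
            raise FileNotFoundError(
                f"{preprocess_path} not found; run the preprocess step first")
        return preprocess_path

    # Mirrors the new arguments --preprocess=false --preprocess_path=$PREPROCESS_DIR:
    # data_dir = resolve_eval_data("false", "/cache/train/preprocess")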

View File

@@ -104,7 +104,7 @@ def test_train_eval(config):
 def modelarts_pre_process():
-config.ckpt_path = config.output_path
+cfg.ckpt_path = cfg.output_path
 @moxing_wrapper(pre_process=modelarts_pre_process)
 def train_wide_and_deep():

View File

@@ -144,7 +144,7 @@ def train_and_eval(config):
 def modelarts_pre_process():
-config.ckpt_path = config.output_path
+cfg.ckpt_path = cfg.output_path
 @moxing_wrapper(pre_process=modelarts_pre_process)
 def train_wide_and_deep():

View File

@@ -118,7 +118,7 @@ def train_and_eval(config):
 def modelarts_pre_process():
-config.ckpt_path = config.output_path
+cfg.ckpt_path = cfg.output_path
 @moxing_wrapper(pre_process=modelarts_pre_process)
 def train_wide_and_deep():
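
Note: these three hunks apply the same one-line fix -- inside modelarts_pre_process the checkpoint path is redirected through the cfg object that the rest of the script uses, instead of the stale config name. The stand-in below only illustrates the call order of the pre_process hook; it is NOT the real moxing_wrapper from the repository's model_utils.

    # Stand-in sketch of the pre_process hook pattern, for illustration only.
    import functools

    class Cfg:                      # hypothetical minimal config object
        ckpt_path = "./ckpt"
        output_path = "/cache/train"

    cfg = Cfg()

    def moxing_wrapper_sketch(pre_process=None):
        def decorator(run_func):
            @functools.wraps(run_func)
            def wrapper(*args, **kwargs):
                if pre_process is not None:
                    pre_process()   # runs before the training entry point
                return run_func(*args, **kwargs)
            return wrapper
        return decorator

    def modelarts_pre_process():
        # On ModelArts, checkpoints should land in the job's output directory.
        cfg.ckpt_path = cfg.output_path

    @moxing_wrapper_sketch(pre_process=modelarts_pre_process)
    def train_wide_and_deep():
        print("saving checkpoints to", cfg.ckpt_path)

    train_wide_and_deep()           # prints the redirected path /cache/train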

View File

@@ -145,13 +145,14 @@ def train_and_eval(config):
 def modelarts_pre_process():
-config.ckpt_path = config.output_path
+cfg.ckpt_path = cfg.output_path
+context.set_context(mode=context.GRAPH_MODE, device_target=cfg.device_target, save_graphs=True)
+cache_enable = cfg.vocab_cache_size > 0
 @moxing_wrapper(pre_process=modelarts_pre_process)
 def train_wide_and_deep():
 """ train_wide_and_deep """
-context.set_context(mode=context.GRAPH_MODE, device_target=cfg.device_target, save_graphs=True)
-cache_enable = cfg.vocab_cache_size > 0
 if cache_enable and cfg.device_target != "GPU":
 context.set_context(variable_memory_max_size="24GB")
 context.set_ps_context(enable_ps=True)
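
Note: in the last two hunks the graph-mode context setup and the vocab-cache check move out of the train_wide_and_deep body so they run before the @moxing_wrapper-decorated entry point. The snippet below is a minimal, self-contained illustration of the context calls involved (all of them appear in the hunk); the hard-coded device_target and vocab_cache_size stand in for the cfg values, and the exact placement of the moved lines is not recoverable from this view, which drops indentation.

    # Illustration of the context calls from the hunk, with stand-in values.
    from mindspore import context

    device_target = "Ascend"        # stand-in for cfg.device_target
    vocab_cache_size = 300000       # stand-in for cfg.vocab_cache_size

    # Configure graph mode once, before the decorated training entry point runs.
    context.set_context(mode=context.GRAPH_MODE,
                        device_target=device_target,
                        save_graphs=True)

    cache_enable = vocab_cache_size > 0
    if cache_enable and device_target != "GPU":
        # Reserve more variable memory when the vocab cache is enabled.
        context.set_context(variable_memory_max_size="24GB")

    # Parameter-server mode is enabled unconditionally in the original hunk.
    context.set_ps_context(enable_ps=True)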

View File

@@ -117,13 +117,14 @@ def train_and_eval(config):
 def modelarts_pre_process():
-config.ckpt_path = config.output_path
+cfg.ckpt_path = cfg.output_path
+context.set_context(mode=context.GRAPH_MODE, device_target=cfg.device_target, save_graphs=True)
+cache_enable = cfg.vocab_cache_size > 0
 @moxing_wrapper(pre_process=modelarts_pre_process)
 def train_wide_and_deep():
 """ train_wide_and_deep """
-context.set_context(mode=context.GRAPH_MODE, device_target=cfg.device_target, save_graphs=True)
-cache_enable = cfg.vocab_cache_size > 0
 if not cache_enable:
 cfg.sparse = True
 if cfg.sparse: