forked from mindspore-Ecosystem/mindspore
replace tab with spaces
This commit is contained in:
parent
57f6c17933
commit
923b53d2e6
@ -29,7 +29,7 @@ class CropAndResize(PrimitiveWithInfer):
    In case that the output shape depends on crop_size, the crop_size must be constant.

    Args:
        method (str): An optional string that specifies the sampling method for resizing.
            It can be "bilinear", "nearest" or "bilinear_v2". The option "bilinear" stands for the standard
            bilinear interpolation algorithm, while "bilinear_v2" may produce better results in some cases.
            Default: "bilinear".
        extrapolation_value (float): An optional float value used for extrapolation, if applicable. Default: 0.
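For readers of the docstring above, a minimal usage sketch follows. The import path, tensor shapes, and NHWC layout are assumptions inferred from the docstring, not part of this commit:

# Hypothetical example of calling the operator documented above; not from this commit.
import numpy as np
from mindspore import Tensor, ops

crop_and_resize = ops.CropAndResize(method="bilinear", extrapolation_value=0.0)

image = Tensor(np.random.rand(1, 256, 256, 3).astype(np.float32))    # assumed NHWC input batch
boxes = Tensor(np.array([[0.1, 0.1, 0.9, 0.9]], dtype=np.float32))   # normalized [y1, x1, y2, x2]
box_index = Tensor(np.array([0], dtype=np.int32))                    # image index for each box
crops = crop_and_resize(image, boxes, box_index, (64, 64))           # crop_size must be a constant tuple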
@ -16,16 +16,16 @@
if [ $# != 2 ] && [ $# != 1 ]
then
    echo "Usage: sh run_distribute_train.sh [RANK_TABLE_FILE] [PRETRAINED_PATH](optional)"
    exit 1
fi

get_real_path(){
    if [ "${1:0:1}" == "/" ]; then
        echo "$1"
    else
        echo "$(realpath -m $PWD/$1)"
    fi
}
PATH1=$(get_real_path $1)
PATH2=$2
@ -40,7 +40,7 @@ fi
if [ ! -f $PATH1 ]
then
    echo "error: RANK_TABLE_FILE=$PATH1 is not a file"
    exit 1
fi

ulimit -u unlimited
@ -16,16 +16,16 @@
if [ $# != 1 ] && [ $# != 0 ]
then
    echo "Usage: sh run_standalone_train.sh [PRETRAINED_PATH](optional)"
    exit 1
fi

get_real_path(){
    if [ "${1:0:1}" == "/" ]; then
        echo "$1"
    else
        echo "$(realpath -m $PWD/$1)"
    fi
}

if [ $# == 1 ]
@ -17,13 +17,13 @@
if [ $# != 3 ] && [ $# != 4 ]
then
    echo "Usage: sh run_distribute_train.sh [cifar10|imagenet2012] [RANK_TABLE_FILE] [DATASET_PATH] [PRETRAINED_CKPT_PATH](optional)"
    exit 1
fi

if [ $1 != "cifar10" ] && [ $1 != "imagenet2012" ]
then
    echo "error: the selected dataset is neither cifar10 nor imagenet2012"
    exit 1
fi

get_real_path(){
@ -45,19 +45,19 @@ fi
if [ ! -f $PATH1 ]
then
    echo "error: RANK_TABLE_FILE=$PATH1 is not a file"
    exit 1
fi

if [ ! -d $PATH2 ]
then
    echo "error: DATASET_PATH=$PATH2 is not a directory"
    exit 1
fi

if [ $# == 4 ] && [ ! -f $PATH3 ]
then
    echo "error: PRETRAINED_CKPT_PATH=$PATH3 is not a file"
    exit 1
fi

ulimit -u unlimited
@ -81,7 +81,7 @@ do
    echo "start training for rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    if [ $# == 3 ]
    then
        python train.py --dataset=$1 --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$PATH2 &> log &
    fi
@ -119,7 +119,7 @@ run_ascend(){
    echo "start training for rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    if [ $# == 3 ]
    then
        python train.py --device_target=$1 --dataset_path=$PATH2 &> train.log &
    fi
@ -16,8 +16,8 @@
if [ $# != 4 ] && [ $# != 5 ]
then
    echo "Usage: sh run_distribute_train.sh [resnet50|resnet101|se-resnet50] [cifar10|imagenet2012] [RANK_TABLE_FILE] [DATASET_PATH] [PRETRAINED_CKPT_PATH](optional)"
    exit 1
fi

if [ $1 != "resnet50" ] && [ $1 != "resnet101" ] && [ $1 != "se-resnet50" ]
@ -99,7 +99,7 @@ do
    echo "start training for rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    if [ $# == 4 ]
    then
        python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$PATH2 &> log &
    fi
@ -16,26 +16,26 @@
if [ $# != 3 ] && [ $# != 4 ]
then
    echo "Usage: sh run_distribute_train_gpu.sh [resnet50|resnet101] [cifar10|imagenet2012] [DATASET_PATH] [PRETRAINED_CKPT_PATH](optional)"
    exit 1
fi

if [ $1 != "resnet50" ] && [ $1 != "resnet101" ]
then
    echo "error: the selected net is neither resnet50 nor resnet101"
    exit 1
fi

if [ $2 != "cifar10" ] && [ $2 != "imagenet2012" ]
then
    echo "error: the selected dataset is neither cifar10 nor imagenet2012"
    exit 1
fi

if [ $1 == "resnet101" ] && [ $2 == "cifar10" ]
then
    echo "error: training resnet101 with cifar10 dataset is unsupported now!"
    exit 1
fi
@ -58,13 +58,13 @@ fi
if [ ! -d $PATH1 ]
then
    echo "error: DATASET_PATH=$PATH1 is not a directory"
    exit 1
fi

if [ $# == 5 ] && [ ! -f $PATH2 ]
then
    echo "error: PRETRAINED_CKPT_PATH=$PATH2 is not a file"
    exit 1
fi

ulimit -u unlimited
@ -79,15 +79,15 @@ cp -r ../src ./train_parallel
cd ./train_parallel || exit

if [ $# == 3 ]
then
    mpirun --allow-run-as-root -n $RANK_SIZE --output-filename log_output --merge-stderr-to-stdout \
        python train.py --net=$1 --dataset=$2 --run_distribute=True \
        --device_num=$DEVICE_NUM --device_target="GPU" --dataset_path=$PATH1 &> log &
fi

if [ $# == 4 ]
then
    mpirun --allow-run-as-root -n $RANK_SIZE --output-filename log_output --merge-stderr-to-stdout \
        python train.py --net=$1 --dataset=$2 --run_distribute=True \
        --device_num=$DEVICE_NUM --device_target="GPU" --dataset_path=$PATH1 --pre_trained=$PATH2 &> log &
fi
@ -16,10 +16,10 @@
if [ $# != 1 ] && [ $# != 2 ] && [ $# != 3 ] && [ $# != 4 ] && [ $# != 5 ]
then
    echo "Usage: sh run_eval_gpu_resnet_benchmark.sh [DATASET_PATH] [CKPT_PATH] [BATCH_SIZE](optional) \
[DTYPE](optional)"
    echo "Example: sh run_eval_gpu_resnet_benchmark.sh /path/imagenet/train /path/ckpt 256 FP16"
    exit 1
fi

get_real_path(){
@ -16,10 +16,10 @@
if [ $# != 1 ] && [ $# != 2 ] && [ $# != 3 ] && [ $# != 4 ] && [ $# != 5 ]
then
    echo "Usage: sh run_gpu_resnet_benchmark.sh [DATASET_PATH] [BATCH_SIZE](optional) [DTYPE](optional)\
[DEVICE_NUM](optional) [SAVE_CKPT](optional) [SAVE_PATH](optional)"
    echo "Example: sh run_gpu_resnet_benchmark.sh /path/imagenet/train 256 FP16 8 true /path/ckpt"
    exit 1
fi

get_real_path(){
@ -16,26 +16,26 @@
if [ $# != 4 ] && [ $# != 5 ]
then
    echo "Usage: sh run_distribute_train.sh [resnet50|resnet101] [cifar10|imagenet2012] [RANK_TABLE_FILE] [DATASET_PATH] [PRETRAINED_CKPT_PATH](optional)"
    exit 1
fi

if [ $1 != "resnet50" ] && [ $1 != "resnet101" ]
then
    echo "error: the selected net is neither resnet50 nor resnet101"
    exit 1
fi

if [ $2 != "cifar10" ] && [ $2 != "imagenet2012" ]
then
    echo "error: the selected dataset is neither cifar10 nor imagenet2012"
    exit 1
fi

if [ $1 == "resnet101" ] && [ $2 == "cifar10" ]
then
    echo "error: training resnet101 with cifar10 dataset is unsupported now!"
    exit 1
fi
|
|||
if [ ! -f $PATH1 ]
|
||||
then
|
||||
echo "error: RANK_TABLE_FILE=$PATH1 is not a file"
|
||||
exit 1
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d $PATH2 ]
|
||||
then
|
||||
echo "error: DATASET_PATH=$PATH2 is not a directory"
|
||||
exit 1
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ $# == 5 ] && [ ! -f $PATH3 ]
|
||||
then
|
||||
echo "error: PRETRAINED_CKPT_PATH=$PATH3 is not a file"
|
||||
exit 1
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ulimit -u unlimited
|
||||
|
@ -96,7 +96,7 @@ cp -r ../src ./sched
cd ./sched || exit
echo "start scheduler"
if [ $# == 4 ]
then
    python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=1 --dataset_path=$PATH2 --parameter_server=True &> sched.log &
fi
@ -119,7 +119,7 @@ do
    cd ./server_$i || exit
    echo "start server"
    if [ $# == 4 ]
    then
        python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=1 --dataset_path=$PATH2 --parameter_server=True &> server_$i.log &
    fi
@ -145,7 +145,7 @@ do
    echo "start training for worker rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    if [ $# == 4 ]
    then
        python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$PATH2 --parameter_server=True &> worker_$i.log &
    fi
@ -16,26 +16,26 @@
if [ $# != 3 ] && [ $# != 4 ]
then
    echo "Usage: sh run_distribute_train_gpu.sh [resnet50|resnet101] [cifar10|imagenet2012] [DATASET_PATH] [PRETRAINED_CKPT_PATH](optional)"
    exit 1
fi

if [ $1 != "resnet50" ] && [ $1 != "resnet101" ]
then
    echo "error: the selected net is neither resnet50 nor resnet101"
    exit 1
fi

if [ $2 != "cifar10" ] && [ $2 != "imagenet2012" ]
then
    echo "error: the selected dataset is neither cifar10 nor imagenet2012"
    exit 1
fi

if [ $1 == "resnet101" ] && [ $2 == "cifar10" ]
then
    echo "error: training resnet101 with cifar10 dataset is unsupported now!"
    exit 1
fi
@ -58,13 +58,13 @@ fi
if [ ! -d $PATH2 ]
then
    echo "error: DATASET_PATH=$PATH1 is not a directory"
    exit 1
fi

if [ $# == 5 ] && [ ! -f $PATH2 ]
then
    echo "error: PRETRAINED_CKPT_PATH=$PATH2 is not a file"
    exit 1
fi

export DEVICE_NUM=8
@ -85,44 +85,43 @@ cp *.sh ./sched
cp -r ../src ./sched
cd ./sched || exit
if [ $# == 3 ]
then
    mpirun --allow-run-as-root -n 1 --output-filename log_output --merge-stderr-to-stdout \
        python train.py --net=$1 --dataset=$2 --run_distribute=True \
        --device_num=$DEVICE_NUM --device_target="GPU" --dataset_path=$PATH1 --parameter_server=True &> sched.log &
fi

if [ $# == 4 ]
then
    mpirun --allow-run-as-root -n 1 --output-filename log_output --merge-stderr-to-stdout \
        python train.py --net=$1 --dataset=$2 --run_distribute=True \
        --device_num=$DEVICE_NUM --device_target="GPU" --dataset_path=$PATH1 --parameter_server=True --pre_trained=$PATH2 &> sched.log &
fi
cd ..

export MS_ROLE=MS_PSERVER
for((i=0;i<$MS_SERVER_NUM;i++));
do
    rm -rf ./server_$i
    mkdir ./server_$i
    cp ../*.py ./server_$i
    cp *.sh ./server_$i
    cp -r ../src ./server_$i
    cd ./server_$i || exit
    if [ $# == 3 ]
    then
        mpirun --allow-run-as-root -n 1 --output-filename log_output --merge-stderr-to-stdout \
            python train.py --net=$1 --dataset=$2 --run_distribute=True \
            --device_num=$DEVICE_NUM --device_target="GPU" --dataset_path=$PATH1 --parameter_server=True &> server_$i.log &
    fi

    if [ $# == 4 ]
    then
        mpirun --allow-run-as-root -n 1 --output-filename log_output --merge-stderr-to-stdout \
            python train.py --net=$1 --dataset=$2 --run_distribute=True \
            --device_num=$DEVICE_NUM --device_target="GPU" --dataset_path=$PATH1 --parameter_server=True --pre_trained=$PATH2 &> server_$i.log &
    fi
    cd ..
done

export MS_ROLE=MS_WORKER
@ -133,16 +132,16 @@ cp *.sh ./worker
cp -r ../src ./worker
cd ./worker || exit
if [ $# == 3 ]
then
    mpirun --allow-run-as-root -n $RANK_SIZE --output-filename log_output --merge-stderr-to-stdout \
        python train.py --net=$1 --dataset=$2 --run_distribute=True \
        --device_num=$DEVICE_NUM --device_target="GPU" --dataset_path=$PATH1 --parameter_server=True &> worker.log &
fi

if [ $# == 4 ]
then
    mpirun --allow-run-as-root -n $RANK_SIZE --output-filename log_output --merge-stderr-to-stdout \
        python train.py --net=$1 --dataset=$2 --run_distribute=True \
        --device_num=$DEVICE_NUM --device_target="GPU" --dataset_path=$PATH1 --parameter_server=True --pre_trained=$PATH2 &> worker.log &
fi
cd ..
@ -86,7 +86,7 @@ run_ascend(){
    echo "start training for rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    if [ $# == 3 ]
    then
        python train.py --device_target=$1 --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$PATH2 &> train.log &
    fi
@ -140,7 +140,7 @@ run_ascend(){
    # echo "start training"
    # env > env.log
    # if [ $# == 2 ]
    # then
    #     mpirun --allow-run-as-root -n $RANK_SIZE
    #     python train.py --device_target=$1 --dataset_path=$PATH1 &> log &
    # fi
@ -158,4 +158,4 @@ if [ $1 = "Ascend" ] ; then
    run_ascend "$@"
else
    echo "Unsupported device target: $1"
fi;
@ -15,9 +15,9 @@
# ============================================================================

if [ $# != 2 ]
then
    echo "Usage: sh run_distribute_train_gpu.sh [DATASET_PATH] [DEVICE_NUM]"
    exit 1
fi

get_real_path(){
@ -22,7 +22,7 @@ echo "hccl connect time out has changed to 600 second"
PATH_CHECKPOINT=""
if [ $# == 3 ]
then
    PATH_CHECKPOINT=$3
fi

cores=`cat /proc/cpuinfo|grep "processor" |wc -l`
@ -19,11 +19,11 @@ export RANK_SIZE=8
PATH_CHECKPOINT=""
if [ $# == 2 ]
then
    PATH_CHECKPOINT=$2
fi

mpirun --allow-run-as-root -n $RANK_SIZE --output-filename log_output --merge-stderr-to-stdout \
    python train.py \
    --is_distribute=1 \
    --platform="GPU" \
    --pretrained=$PATH_CHECKPOINT \
@ -24,4 +24,4 @@ RANK_SIZE=$1
export CUDA_VISIBLE_DEVICES="$2"

mpirun --allow-run-as-root -n $RANK_SIZE \
    python train.py > train.log 2>&1 &
@ -16,20 +16,20 @@
if [ $# != 4 ] && [ $# != 5 ]
then
    echo "Usage: sh scripts/run_distribute_train.sh [squeezenet|squeezenet_residual] [cifar10|imagenet] [RANK_TABLE_FILE] [DATASET_PATH] [PRETRAINED_CKPT_PATH](optional)"
    exit 1
fi

if [ $1 != "squeezenet" ] && [ $1 != "squeezenet_residual" ]
then
    echo "error: the selected net is neither squeezenet nor squeezenet_residual"
    exit 1
fi

if [ $2 != "cifar10" ] && [ $2 != "imagenet" ]
then
    echo "error: the selected dataset is neither cifar10 nor imagenet"
    exit 1
fi

get_real_path(){
@ -51,19 +51,19 @@ fi
if [ ! -f $PATH1 ]
then
    echo "error: RANK_TABLE_FILE=$PATH1 is not a file"
    exit 1
fi

if [ ! -d $PATH2 ]
then
    echo "error: DATASET_PATH=$PATH2 is not a directory"
    exit 1
fi

if [ $# == 5 ] && [ ! -f $PATH3 ]
then
    echo "error: PRETRAINED_CKPT_PATH=$PATH3 is not a file"
    exit 1
fi

ulimit -u unlimited
@ -86,7 +86,7 @@ do
    echo "start training for rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    if [ $# == 4 ]
    then
        python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$PATH2 &> log &
    fi
@ -27,7 +27,7 @@

Unet Medical model for 2D image segmentation. This implementation follows the original paper [UNet: Convolutional Networks for Biomedical Image Segmentation](https://arxiv.org/abs/1505.04597). UNet achieved several of the best results in the 2015 ISBI cell tracking competition. The paper proposes a network model for medical image segmentation together with a data augmentation method that makes effective use of the available annotations, addressing the shortage of annotated data in the medical field. A U-shaped network structure is used to capture both context and localization information.

[Paper](https://arxiv.org/abs/1505.04597): Olaf Ronneberger, Philipp Fischer, Thomas Brox. "U-Net: Convolutional Networks for Biomedical Image Segmentation." *conditionally accepted at MICCAI 2015*. 2015.


# [Model Architecture](#contents)
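As a rough illustration of the U-shaped structure described above, here is a minimal sketch in MindSpore. Layer sizes and names are invented for illustration; this is not the repository's actual network definition:

# Illustrative only: a tiny U-shaped block with one skip connection.
import mindspore.nn as nn
import mindspore.ops as ops

class TinyUNet(nn.Cell):
    def __init__(self, in_ch=1, base_ch=16, num_classes=2):
        super().__init__()
        self.enc = nn.SequentialCell([nn.Conv2d(in_ch, base_ch, 3), nn.ReLU()])
        self.down = nn.MaxPool2d(kernel_size=2, stride=2)
        self.mid = nn.SequentialCell([nn.Conv2d(base_ch, base_ch * 2, 3), nn.ReLU()])
        self.up = nn.Conv2dTranspose(base_ch * 2, base_ch, kernel_size=2, stride=2)
        self.dec = nn.SequentialCell([nn.Conv2d(base_ch * 2, base_ch, 3), nn.ReLU()])
        self.head = nn.Conv2d(base_ch, num_classes, 1)
        self.concat = ops.Concat(axis=1)

    def construct(self, x):
        e = self.enc(x)                    # contracting path: extract context
        m = self.mid(self.down(e))         # bottleneck at lower resolution
        d = self.up(m)                     # expanding path: recover resolution
        d = self.dec(self.concat((e, d)))  # skip connection: keep localization info
        return self.head(d)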
@ -47,8 +47,8 @@ Note that you can run the scripts based on the dataset mentioned in original pap

#### Dataset used: [ImageNet2012](http://www.image-net.org/)

- Dataset size: ~146G, 1.28 million colorful images in 1000 classes
    - Train: 140G, 1,281,167 images
    - Test: 6.4G, 50,000 images
- Data format: RGB images
    - Note: Data will be processed in src/dataset.py
@ -30,13 +30,13 @@ TEACHER_CKPT_PATH=$5
PROJECT_DIR=$(cd "$(dirname "$0")" || exit; pwd)

mpirun --allow-run-as-root -n $RANK_SIZE --output-filename log_output --merge-stderr-to-stdout \
    python ${PROJECT_DIR}/../run_general_distill.py \
    --distribute="true" \
    --device_target="GPU" \
    --epoch_size=$EPOCH_SIZE \
    --save_ckpt_path="" \
    --data_dir=$DATA_DIR \
    --schema_dir=$SCHEMA_DIR \
    --dataset_type="tfrecord" \
    --enable_data_sink="false" \
    --load_teacher_ckpt_path=$TEACHER_CKPT_PATH > log.txt 2>&1 &
@ -36,7 +36,7 @@ Dataset used: [Oxford-IIIT Pet](https://www.robots.ox.ac.uk/~vgg/data/pets/)
    - Train: 3680 images
    - Test: 3369 images
- Data format: RGB images.
    - Note: Data will be processed in src/dataset.py

# [Environment Requirements](#contents)
|
|||
- Train: 3680 images
|
||||
- Test: 3369 images
|
||||
- Data format: RGB images.
|
||||
- Note: Data will be processed in src/dataset.py
|
||||
- Note: Data will be processed in src/dataset.py
|
||||
|
||||
# [Environment Requirements](#contents)
|
||||
|
||||
|
|
|
@ -16,20 +16,20 @@
if [ $# != 4 ] && [ $# != 5 ]
then
    echo "Usage: sh scripts/run_distribute_train.sh [squeezenet|squeezenet_residual] [cifar10|imagenet] [RANK_TABLE_FILE] [DATASET_PATH] [PRETRAINED_CKPT_PATH](optional)"
    exit 1
fi

if [ $1 != "squeezenet" ] && [ $1 != "squeezenet_residual" ]
then
    echo "error: the selected net is neither squeezenet nor squeezenet_residual"
    exit 1
fi

if [ $2 != "cifar10" ] && [ $2 != "imagenet" ]
then
    echo "error: the selected dataset is neither cifar10 nor imagenet"
    exit 1
fi

get_real_path(){
@ -51,19 +51,19 @@ fi
if [ ! -f $PATH1 ]
then
    echo "error: RANK_TABLE_FILE=$PATH1 is not a file"
    exit 1
fi

if [ ! -d $PATH2 ]
then
    echo "error: DATASET_PATH=$PATH2 is not a directory"
    exit 1
fi

if [ $# == 5 ] && [ ! -f $PATH3 ]
then
    echo "error: PRETRAINED_CKPT_PATH=$PATH3 is not a file"
    exit 1
fi

ulimit -u unlimited
@ -86,7 +86,7 @@ do
    echo "start training for rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    if [ $# == 4 ]
    then
        python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$PATH2 &> log &
    fi
@ -16,20 +16,20 @@
if [ $# != 3 ] && [ $# != 4 ]
then
    echo "Usage: sh scripts/run_distribute_train_gpu.sh [squeezenet|squeezenet_residual] [cifar10|imagenet] [DATASET_PATH] [PRETRAINED_CKPT_PATH](optional)"
    exit 1
fi

if [ $1 != "squeezenet" ] && [ $1 != "squeezenet_residual" ]
then
    echo "error: the selected net is neither squeezenet nor squeezenet_residual"
    exit 1
fi

if [ $2 != "cifar10" ] && [ $2 != "imagenet" ]
then
    echo "error: the selected dataset is neither cifar10 nor imagenet"
    exit 1
fi

get_real_path(){
@ -51,13 +51,13 @@ fi
if [ ! -d $PATH1 ]
then
    echo "error: DATASET_PATH=$PATH1 is not a directory"
    exit 1
fi

if [ $# == 5 ] && [ ! -f $PATH2 ]
then
    echo "error: PRETRAINED_CKPT_PATH=$PATH2 is not a file"
    exit 1
fi

ulimit -u unlimited
@ -71,15 +71,15 @@ cp -r ./src ./train_parallel
cd ./train_parallel || exit

if [ $# == 3 ]
then
    mpirun --allow-run-as-root -n $RANK_SIZE --output-filename log_output --merge-stderr-to-stdout \
        python train.py --net=$1 --dataset=$2 --run_distribute=True \
        --device_num=$DEVICE_NUM --device_target="GPU" --dataset_path=$PATH1 &> log &
fi

if [ $# == 4 ]
then
    mpirun --allow-run-as-root -n $RANK_SIZE --output-filename log_output --merge-stderr-to-stdout \
        python train.py --net=$1 --dataset=$2 --run_distribute=True \
        --device_num=$DEVICE_NUM --device_target="GPU" --dataset_path=$PATH1 --pre_trained=$PATH2 &> log &
fi
@ -78,5 +78,5 @@ time mpirun -n $RANK_SIZE --allow-run-as-root python3 ${current_exec_path}/train
    --distributed \
    --data_path /path_to_ImageNet/ \
    --GPU \
    --dataset_sink > tinynet_c.log 2>&1 &
@ -64,7 +64,7 @@ function checkopts()
            usage
            exit 1
        fi
        ;;
    h)
        usage
        exit 0
@ -22,10 +22,10 @@ PYTHONTEST_DIR="${PROJECT_PATH}/tests/perf_test"
PERF_RESULT_DIR="${CURRPATH}/"
PERF_SUFFIX=".perf"
if [[ "${BUILD_PATH}" ]];then
    echo "BUILD_PATH = ${BUILD_PATH}"
else
    BUILD_PATH="${PROJECT_PATH}/build"
    echo "BUILD_PATH = ${BUILD_PATH}"
fi

cd "${PROJECT_PATH}"; sh build.sh -t off -l none -r -p on -j 20; cd -
@ -18,10 +18,10 @@ set -e
BASEPATH=$(cd "$(dirname "$0")"; pwd)
PROJECT_PATH=${BASEPATH}/../../..
if [ $BUILD_PATH ];then
    echo "BUILD_PATH = $BUILD_PATH"
else
    BUILD_PATH=${PROJECT_PATH}/build
    echo "BUILD_PATH = $BUILD_PATH"
fi
cd ${BUILD_PATH}/mindspore/tests/ut/cpp
@ -18,10 +18,10 @@ IGNORE_EXEC="--ignore=$CURRPATH/exec"
PROJECT_PATH=$(cd ${CURRPATH}/../../..; pwd)

if [ $BUILD_PATH ];then
    echo "BUILD_PATH = $BUILD_PATH"
else
    BUILD_PATH=${PROJECT_PATH}/build
    echo "BUILD_PATH = $BUILD_PATH"
fi

export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${BUILD_PATH}/third_party/gtest/lib
@ -84,4 +84,4 @@ else
fi

RET=$?
exit ${RET}
@ -17,10 +17,10 @@
CURRPATH=$(cd $(dirname $0); pwd)
PROJECT_PATH=${CURRPATH}/../..
if [ $BUILD_PATH ];then
    echo "BUILD_PATH = $BUILD_PATH"
else
    BUILD_PATH=${PROJECT_PATH}/build
    echo "BUILD_PATH = $BUILD_PATH"
fi
cd ${BUILD_PATH}/mindspore/tests/ut