
run_distribute_train.sh

#!/bin/bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
echo "=============================================================================================================="
echo "Please run the script as: "
echo "bash run_distribute_train.sh DEVICE_NUM EPOCH_SIZE DATA_DIR MINDSPORE_HCCL_CONFIG_PATH"
echo "for example: bash run_distribute_train.sh 8 40 /path/zh-wiki/ /path/hccl.json"
echo "It is better to use absolute path."
echo "=============================================================================================================="

EPOCH_SIZE=$2
DATA_DIR=$3
export MINDSPORE_HCCL_CONFIG_PATH=$4
export RANK_TABLE_FILE=$4
export RANK_SIZE=$1

# Split the host's logical cores evenly across the ranks so that each
# training process can be pinned to its own core range via taskset.
cores=`cat /proc/cpuinfo | grep "processor" | wc -l`
echo "the number of logical cores:" $cores
avg_core_per_rank=`expr $cores \/ $RANK_SIZE`
core_gap=`expr $avg_core_per_rank \- 1`
echo "avg_core_per_rank" $avg_core_per_rank
echo "core_gap" $core_gap

for ((i = 0; i < RANK_SIZE; i++))
do
    start=`expr $i \* $avg_core_per_rank`
    export DEVICE_ID=$i
    export RANK_ID=$i
    export DEPLOY_MODE=0
    export GE_USE_STATIC_MEMORY=1
    end=`expr $start \+ $core_gap`
    cmdopt=$start"-"$end

    # One working directory per rank: LOG0, LOG1, ...
    rm -rf LOG$i
    mkdir ./LOG$i
    cp *.py ./LOG$i
    cd ./LOG$i || exit
    echo "start training for rank $i, device $DEVICE_ID"
    mkdir -p ms_log
    CUR_DIR=`pwd`
    export GLOG_log_dir=${CUR_DIR}/ms_log
    export GLOG_logtostderr=0
    env > env.log

    # Pin the rank to its core range and launch training in the background.
    taskset -c $cmdopt python ../train.py \
        --distribute="true" \
        --epoch_size=$EPOCH_SIZE \
        --device_id=$DEVICE_ID \
        --enable_save_ckpt="true" \
        --checkpoint_url="" \
        --save_checkpoint_steps=10000 \
        --save_checkpoint_num=1 \
        --data_url=$DATA_DIR > log.txt 2>&1 &
    cd ../
done
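
For reference, the core-binding arithmetic above divides the host's logical cores into equal, non-overlapping slices so the ranks do not compete for the same CPUs. The sketch below is an illustration only: the 96-core host and RANK_SIZE=8 are assumptions, not values fixed by the script.

# Illustration only: assumes a host with 96 logical cores and RANK_SIZE=8.
cores=96
RANK_SIZE=8
avg_core_per_rank=`expr $cores \/ $RANK_SIZE`   # 12 cores per rank
core_gap=`expr $avg_core_per_rank \- 1`         # 11
i=3                                             # rank 3 as an example
start=`expr $i \* $avg_core_per_rank`           # 36
end=`expr $start \+ $core_gap`                  # 47
echo "rank $i -> taskset -c $start-$end"        # rank 3 -> taskset -c 36-47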