You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

run_distribute_train.sh 3.4 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697
  1. #!/bin/bash
  2. # Copyright 2020 Huawei Technologies Co., Ltd
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License");
  5. # you may not use this file except in compliance with the License.
  6. # You may obtain a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. # ============================================================================
  16. echo "======================================================================================================================================================="
  17. echo "Please run the scipt as: "
  18. echo "sh run_distribute_train.sh DEVICE_NUM EPOCH_SIZE MINDRECORD_DIR IMAGE_DIR ANNO_PATH MINDSPORE_HCCL_CONFIG_PATH PRE_TRAINED PRE_TRAINED_EPOCH_SIZE"
  19. echo "For example: sh run_distribute_train.sh 8 150 /data/Mindrecord_train /data /data/train.txt /data/hccl.json /opt/yolov3-150.ckpt(optional) 100(optional)"
  20. echo "It is better to use absolute path."
  21. echo "The learning rate is 0.005 as default, if you want other lr, please change the value in this script."
  22. echo "======================================================================================================================================================="
  23. if [ $# != 6 ] && [ $# != 8 ]
  24. then
  25. echo "Usage: sh run_distribute_train.sh [DEVICE_NUM] [EPOCH_SIZE] [MINDRECORD_DIR] [IMAGE_DIR] [ANNO_PATH] [MINDSPORE_HCCL_CONFIG_PATH] \
  26. [PRE_TRAINED](optional) [PRE_TRAINED_EPOCH_SIZE](optional)"
  27. exit 1
  28. fi
  29. EPOCH_SIZE=$2
  30. MINDRECORD_DIR=$3
  31. IMAGE_DIR=$4
  32. ANNO_PATH=$5
  33. PRE_TRAINED=$7
  34. PRE_TRAINED_EPOCH_SIZE=$8
  35. # Before start distribute train, first create mindrecord files.
  36. python train.py --only_create_dataset=1 --mindrecord_dir=$MINDRECORD_DIR --image_dir=$IMAGE_DIR \
  37. --anno_path=$ANNO_PATH
  38. echo "After running the scipt, the network runs in the background. The log will be generated in LOGx/log.txt"
  39. export MINDSPORE_HCCL_CONFIG_PATH=$6
  40. export RANK_SIZE=$1
  41. BASE_PATH=$(cd "`dirname $0`" || exit; pwd)
  42. cd $BASE_PATH/../ || exit
  43. for((i=0;i<RANK_SIZE;i++))
  44. do
  45. export DEVICE_ID=$i
  46. start=`expr $i \* 12`
  47. end=`expr $start \+ 11`
  48. cmdopt=$start"-"$end
  49. rm -rf LOG$i
  50. mkdir ./LOG$i
  51. cp *.py ./LOG$i
  52. cp -r ./src ./LOG$i
  53. cd ./LOG$i || exit
  54. export RANK_ID=$i
  55. echo "start training for rank $i, device $DEVICE_ID"
  56. env > env.log
  57. if [ $# == 6 ]
  58. then
  59. taskset -c $cmdopt python train.py \
  60. --distribute=1 \
  61. --lr=0.005 \
  62. --device_num=$RANK_SIZE \
  63. --device_id=$DEVICE_ID \
  64. --mindrecord_dir=$MINDRECORD_DIR \
  65. --image_dir=$IMAGE_DIR \
  66. --epoch_size=$EPOCH_SIZE \
  67. --anno_path=$ANNO_PATH > log.txt 2>&1 &
  68. fi
  69. if [ $# == 8 ]
  70. then
  71. taskset -c $cmdopt python train.py \
  72. --distribute=1 \
  73. --lr=0.005 \
  74. --device_num=$RANK_SIZE \
  75. --device_id=$DEVICE_ID \
  76. --mindrecord_dir=$MINDRECORD_DIR \
  77. --image_dir=$IMAGE_DIR \
  78. --epoch_size=$EPOCH_SIZE \
  79. --pre_trained=$PRE_TRAINED \
  80. --pre_trained_epoch_size=$PRE_TRAINED_EPOCH_SIZE \
  81. --anno_path=$ANNO_PATH > log.txt 2>&1 &
  82. fi
  83. cd ../
  84. done