
run_distribute_train.sh 1.7 kB

#!/bin/bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

if [ $# != 2 ]; then
    echo "Usage: sh run_distribute_train.sh [MINDSPORE_HCCL_CONFIG_PATH] [DATASET_PATH]"
    exit 1
fi

# Resolve a possibly relative path to an absolute one.
get_real_path() {
    if [ "${1:0:1}" == "/" ]; then
        echo "$1"
    else
        realpath -m "$PWD/$1"
    fi
}

PATH1=$(get_real_path "$1")
PATH2=$(get_real_path "$2")

if [ ! -f "$PATH1" ]; then
    echo "error: MINDSPORE_HCCL_CONFIG_PATH=$PATH1 is not a file"
    exit 1
fi

if [ ! -d "$PATH2" ]; then
    echo "error: DATASET_PATH=$PATH2 is not a directory"
    exit 1
fi

ulimit -u unlimited
export DEVICE_NUM=8
export RANK_SIZE=8
export MINDSPORE_HCCL_CONFIG_PATH=$PATH1
export RANK_TABLE_FILE=$PATH1

# Launch one training process per device, each in its own working directory.
for ((i = 0; i < ${DEVICE_NUM}; i++)); do
    export DEVICE_ID=$i
    export RANK_ID=$i
    rm -rf ./train_parallel$i
    mkdir ./train_parallel$i
    cp ../*.py ./train_parallel$i
    cp *.sh ./train_parallel$i
    cp -r ../src ./train_parallel$i
    cd ./train_parallel$i || exit
    echo "start training for rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    python train.py --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$PATH2 &> log &
    cd ..
done
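
For reference, a minimal invocation sketch: the script is launched from the directory that contains it, with the HCCL rank table JSON and the dataset directory as its two arguments. The file and directory names below are placeholders, not paths shipped with the repository:

    bash run_distribute_train.sh ./rank_table_8pcs.json /path/to/dataset

Per the loop above, each of the 8 devices trains in its own train_parallel<i> directory; the training output of rank i is redirected to train_parallel<i>/log, and the environment it ran with is captured in train_parallel<i>/env.log.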