
run_distribute_train.sh

#!/bin/bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
if [ $# != 2 ]
then
    echo "Usage: sh run_distribute_train.sh [MINDSPORE_HCCL_CONFIG_PATH] [DATASET_PATH]"
    exit 1
fi

# Resolve a possibly relative path to an absolute one.
get_real_path(){
    if [ "${1:0:1}" == "/" ]; then
        echo "$1"
    else
        echo "$(realpath -m "$PWD/$1")"
    fi
}

PATH1=$(get_real_path "$1")
PATH2=$(get_real_path "$2")

if [ ! -f "$PATH1" ]
then
    echo "error: MINDSPORE_HCCL_CONFIG_PATH=$PATH1 is not a file"
    exit 1
fi

if [ ! -d "$PATH2" ]
then
    echo "error: DATASET_PATH=$PATH2 is not a directory"
    exit 1
fi

ulimit -u unlimited
export DEVICE_NUM=8
export RANK_SIZE=8
export MINDSPORE_HCCL_CONFIG_PATH=$PATH1
export RANK_TABLE_FILE=$PATH1

# Launch one training process per device, each in its own working directory.
for((i=0; i<${DEVICE_NUM}; i++))
do
    export DEVICE_ID=$i
    export RANK_ID=$i
    rm -rf ./train_parallel$i
    mkdir ./train_parallel$i
    cp *.py ./train_parallel$i
    cp *.sh ./train_parallel$i
    cd ./train_parallel$i || exit
    echo "start training for rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    python train.py --do_train=True --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$PATH2 &> log &
    cd ..
done
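For reference, a minimal sketch of a typical invocation, assuming an 8-device Ascend host; the rank-table JSON name and dataset directory below are placeholders, not files shipped with this script:

    sh run_distribute_train.sh ./hccl_8p.json /path/to/dataset

The script creates train_parallel0 through train_parallel7, one per device, and each background process writes its output to a file named log inside its own directory, so progress can be followed with, for example, tail -f train_parallel0/log.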