You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or number, can include dashes ('-') and can be up to 35 characters long.

run_distribute_pretrain.sh 2.3 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667
#!/bin/bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
  16. echo "=============================================================================================================="
  17. echo "Please run the scipt as: "
  18. echo "sh run_distribute_pretrain.sh DEVICE_NUM EPOCH_SIZE DATA_DIR SCHEMA_DIR MINDSPORE_HCCL_CONFIG_PATH"
  19. echo "for example: sh run_distribute_pretrain.sh 8 40 /path/zh-wiki/ /path/Schema.json /path/hccl.json"
  20. echo "It is better to use absolute path."
  21. echo "=============================================================================================================="
  22. EPOCH_SIZE=$2
  23. DATA_DIR=$3
  24. SCHEMA_DIR=$4
  25. export MINDSPORE_HCCL_CONFIG_PATH=$5
  26. export RANK_TABLE_FILE=$5
  27. export RANK_SIZE=$1
  28. for((i=0;i<RANK_SIZE;i++))
  29. do
  30. start=`expr $i \* 12`
  31. export DEVICE_ID=$i
  32. export RANK_ID=$i
  33. export DEPLOY_MODE=0
  34. export GE_USE_STATIC_MEMORY=1
  35. end=`expr $start \+ 11`
  36. cmdopt=$start"-"$end
  37. rm -rf LOG$i
  38. mkdir ./LOG$i
  39. cp *.py ./LOG$i
  40. cd ./LOG$i || exit
  41. echo "start training for rank $i, device $DEVICE_ID"
  42. env > env.log
  43. taskset -c $cmdopt python ../run_pretrain.py \
  44. --distribute="true" \
  45. --epoch_size=$EPOCH_SIZE \
  46. --device_id=$DEVICE_ID \
  47. --device_num=$RANK_SIZE \
  48. --enable_task_sink="true" \
  49. --enable_loop_sink="true" \
  50. --enable_mem_reuse="true" \
  51. --enable_save_ckpt="true" \
  52. --enable_lossscale="true" \
  53. --do_shuffle="true" \
  54. --enable_data_sink="true" \
  55. --data_sink_steps=1 \
  56. --checkpoint_path="" \
  57. --save_checkpoint_steps=10000 \
  58. --save_checkpoint_num=1 \
  59. --data_dir=$DATA_DIR \
  60. --schema_dir=$SCHEMA_DIR > log.txt 2>&1 &
  61. cd ../
  62. done