You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

device_manager.h 4.6 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130
  1. /**
  2. * Copyright 2019 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #ifndef MINDSPORE_CCSRC_PARALLEL_DEVICE_MANAGER_H_
  17. #define MINDSPORE_CCSRC_PARALLEL_DEVICE_MANAGER_H_
  18. #include <cstdint>
  19. #include <cstring>
  20. #include <map>
  21. #include <memory>
  22. #include <string>
  23. #include <utility>
  24. #include <vector>
  25. #include "common/utils.h"
  26. #include "parallel/device.h"
  27. #include "parallel/device_matrix.h"
  28. #include "parallel/group_manager.h"
  29. #include "parallel/status.h"
  30. #include "parallel/strategy.h"
  31. #include "utils/convert_utils.h"
  32. namespace mindspore {
  33. namespace parallel {
  34. #define MAX_DEVICE_NUM 1024
  35. constexpr char HCCL_BACKEND[] = "hccl";
  36. constexpr char NCCL_BACKEND[] = "nccl";
  37. constexpr char UNDEFINED_BACKEND[] = "undefined_backend";
  38. class DeviceManager;
  39. using DeviceManagerPtr = std::shared_ptr<DeviceManager>;
  40. // 'g_device_manager' is the globally unique manager to manage the devices.
  41. extern DeviceManagerPtr g_device_manager;
  42. class Stage {
  43. // This class is used in pipeline-parallelization. Available devices are partitioned into multiple stages.
  44. // Currently, the function of pipeline-parallelization and this class are NOT implemented.
  45. public:
  46. explicit Stage(std::vector<Device> devices) : devices_(std::move(devices)), number_(0), rank_(0) {
  47. gm_ = GroupManager();
  48. }
  49. Stage(const std::vector<mindspore::parallel::Device> &devices, int num, int rank);
  50. ~Stage() = default;
  51. int GetStageNum() const { return number_; }
  52. size_t GetDevicesNum() const { return devices_.size(); }
  53. std::vector<Device> GetDevicesList() { return devices_; }
  54. int global_rank(Group *g) const;
  55. private:
  56. std::vector<Device> devices_;
  57. int number_;
  58. int32_t rank_;
  59. GroupManager gm_;
  60. };
// Initializes the globally unique DeviceManager 'g_device_manager' with the
// total number of devices, this process's global rank, and the backend name
// (expected to be one of the *_BACKEND constants above). Returns true on
// success.
bool InitDevice(int32_t device_num, int32_t global_rank, const std::string &backend);

// Checks that 'g_device_manager' has been initialized. Defined in the .cc;
// presumably logs or aborts when it has not — confirm against the definition.
void CheckGlobalDeviceManager();

// Maps a rank-list name to the hash string used as its group name.
std::string HashName(const std::string &rank_list_name);
  66. class DeviceManager {
  67. // This class is used to manage the abstract devices, including group-related and stage-related management.
  68. public:
  69. DeviceManager() : local_rank_(0), global_rank_(0), stage_num_(0) { gm_ = GroupManager(); }
  70. ~DeviceManager() = default;
  71. Status Init(const RankList &devices, int32_t local_device, const RankList &stage_map, const std::string &backend);
  72. static DeviceManager &GetInstance();
  73. RankList GetDeviceListByStageId(int32_t stage_id) const;
  74. RankList global_device_list(int32_t stage_id, int32_t rank, int32_t split_num) const;
  75. Device CreateNewDeviceByRank(int32_t rank) const;
  76. std::vector<Device> CreateDeviceListByRankList(RankList ranks);
  77. std::string GenerateGroupNameByRanks(RankList dev_ranks);
  78. Group CreateGroup(const std::string &group_name, const std::vector<Device> &devices);
  79. Group CreateGroup(const RankList &dev_ranks);
  80. std::shared_ptr<Stage> GetStageById(int32_t stage_id);
  81. size_t DeviceNum() const { return devices_.size(); }
  82. int32_t GetStageNum() const { return static_cast<const int32_t>(stage_devices_.size()); }
  83. int32_t global_rank() const { return global_rank_; }
  84. std::string backend() const { return backend_; }
  85. void set_global_rank(int32_t global_rank) { global_rank_ = global_rank; }
  86. void Clear();
  87. std::string world_group() const { return gm_.world_group(); }
  88. std::string FindRankListNameByHashName(const std::string &hash_name);
  89. private:
  90. std::vector<std::shared_ptr<Device>> devices_;
  91. // each stage has a list of devices
  92. std::vector<std::vector<int32_t>> stage_devices_;
  93. std::shared_ptr<Device> device_;
  94. std::vector<std::shared_ptr<Stage>> stages_;
  95. GroupManager gm_;
  96. std::string backend_;
  97. // bimap:
  98. std::map<std::string, std::string> rank_to_group_; // the key is rank list, value is hash name
  99. std::map<std::string, std::string> group_to_rank_; // the key is hash name, value is rank list
  100. int32_t local_rank_;
  101. int32_t global_rank_;
  102. int32_t stage_num_;
  103. };
  104. } // namespace parallel
  105. } // namespace mindspore
  106. #endif // MINDSPORE_CCSRC_PARALLEL_DEVICE_MANAGER_H_