You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

optimizer_info.h 4.1 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #ifndef MINDSPORE_CCSRC_FRONTEND_PARALLEL_PS_OPTIMIZER_INFO_H_
  17. #define MINDSPORE_CCSRC_FRONTEND_PARALLEL_PS_OPTIMIZER_INFO_H_
  18. #include <vector>
  19. #include "backend/kernel_compiler/kernel.h"
  20. #include "frontend/parallel/ps/common.h"
  21. namespace mindspore {
  22. namespace parallel {
  23. namespace ps {
  24. using mindspore::kernel::AddressPtr;
  25. class OptimizerInfo {
  26. public:
  27. OptimizerInfo() = default;
  28. virtual ~OptimizerInfo() = default;
  29. virtual void Update(const Values &values, const Lengths &lengths) {}
  30. virtual void UpdateWeight(const WeightPtr &weight);
  31. virtual void Accumulate(const Values &values, const Lengths &lengths) = 0;
  32. virtual void ComputeMean(size_t n) {}
  33. virtual void Reset() {}
  34. void AddWorkspace(const AddressPtr &workspace);
  35. virtual const AddressPtr &gradient() = 0;
  36. virtual const AddressPtr &indices() = 0;
  37. const std::vector<AddressPtr> &inputs();
  38. const std::vector<AddressPtr> &workspaces();
  39. const std::vector<AddressPtr> &outputs();
  40. virtual bool IsSparse() const;
  41. virtual size_t grad_index();
  42. virtual size_t indices_index();
  43. protected:
  44. std::vector<AddressPtr> inputs_;
  45. std::vector<AddressPtr> workspaces_;
  46. std::vector<AddressPtr> outputs_;
  47. };
  48. class DenseOptimInfo : public OptimizerInfo {
  49. public:
  50. DenseOptimInfo() = default;
  51. ~DenseOptimInfo() override = default;
  52. void Accumulate(const Values &values, const Lengths &lens) override;
  53. void ComputeMean(size_t n) override;
  54. void Reset() override;
  55. };
  56. class SparseOptimInfo : public OptimizerInfo {
  57. public:
  58. SparseOptimInfo() = default;
  59. ~SparseOptimInfo() override = default;
  60. void Accumulate(const Values &values, const Lengths &lens) override;
  61. void Reset() override;
  62. protected:
  63. size_t grads_offset_{0};
  64. size_t indices_offset_{0};
  65. };
  66. class MomentumOptimInfo : public DenseOptimInfo {
  67. public:
  68. MomentumOptimInfo(const AddressPtr &weight, const AddressPtr &accumulate, const AddressPtr &learning_rate,
  69. const AddressPtr &gradient, const AddressPtr &momentum);
  70. ~MomentumOptimInfo() override = default;
  71. void Update(const Values &values, const Lengths &lens) override;
  72. const AddressPtr &gradient();
  73. const AddressPtr &indices();
  74. size_t grad_index() override;
  75. };
  76. class SparseAdamOptimInfo : public SparseOptimInfo {
  77. public:
  78. SparseAdamOptimInfo(const AddressPtr &weight, const AddressPtr &m, const AddressPtr &v, const AddressPtr &beta1_power,
  79. const AddressPtr &beta2_power, const AddressPtr &learning_rate, const AddressPtr &beta1,
  80. const AddressPtr &beta2, const AddressPtr &epsilon, const AddressPtr &grad,
  81. const AddressPtr &indices);
  82. ~SparseAdamOptimInfo() override = default;
  83. void Update(const Values &values, const Lengths &lens) override;
  84. const AddressPtr &gradient();
  85. const AddressPtr &indices();
  86. bool IsSparse() const override;
  87. size_t grad_index() override;
  88. size_t indices_index() override;
  89. };
  90. class SparseFtrlOptimInfo : public SparseOptimInfo {
  91. public:
  92. SparseFtrlOptimInfo(const AddressPtr &weight, const AddressPtr &accum, const AddressPtr &linear,
  93. const AddressPtr &grad, const AddressPtr &indices);
  94. ~SparseFtrlOptimInfo() override = default;
  95. const AddressPtr &gradient();
  96. const AddressPtr &indices();
  97. bool IsSparse() const override;
  98. size_t grad_index() override;
  99. size_t indices_index() override;
  100. };
  101. } // namespace ps
  102. } // namespace parallel
  103. } // namespace mindspore
  104. #endif // MINDSPORE_CCSRC_FRONTEND_PARALLEL_PS_OPTIMIZER_INFO_H_