
learning_rate_schedule.py 15 kB

# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Learning rate schedule."""
import math

from ..common import dtype as mstype
from ..ops import operations as P
from .cell import Cell
from .._checkparam import Validator as validator
from .._checkparam import Rel


class LearningRateSchedule(Cell):
    """Basic class of learning rate schedule."""
    def __init__(self):
        super(LearningRateSchedule, self).__init__()

    def construct(self, global_step):
        """
        Defines the computation to get the current learning rate.

        This method must be overridden by all subclasses.

        Note:
            The output must be a scalar Tensor.

        Inputs:
            Tensor. The current step number.
        """
        raise NotImplementedError


def _check_inputs(learning_rate, decay_rate, decay_steps, is_stair, cls_name):
    validator.check_positive_int(decay_steps, 'decay_steps', cls_name)
    validator.check_positive_float(learning_rate, 'learning_rate', cls_name)
    validator.check_is_float(learning_rate, 'learning_rate', cls_name)
    validator.check_positive_float(decay_rate, 'decay_rate', cls_name)
    validator.check_is_float(decay_rate, 'decay_rate', cls_name)
    validator.check_value_type('is_stair', is_stair, [bool], cls_name)


class ExponentialDecayLR(LearningRateSchedule):
    r"""
    Calculates the learning rate based on an exponential decay function.

    For the i-th step, the formula for computing decayed_learning_rate[i] is:

    .. math::
        decayed\_learning\_rate[i] = learning\_rate * decay\_rate^{p}

    Where:

    .. math::
        p = \frac{current\_step}{decay\_steps}

    If `is_stair` is True, the formula is:

    .. math::
        p = floor(\frac{current\_step}{decay\_steps})

    Args:
        learning_rate (float): The initial value of the learning rate.
        decay_rate (float): The decay rate.
        decay_steps (int): A value used to calculate the decayed learning rate.
        is_stair (bool): If True, the learning rate is decayed once every `decay_steps` steps. Default: False.

    Inputs:
        Tensor. The current step number.

    Returns:
        Tensor. The learning rate value for the current step.

    Examples:
        >>> learning_rate = 0.1
        >>> decay_rate = 0.9
        >>> decay_steps = 4
        >>> global_step = Tensor(2, mstype.int32)
        >>> exponential_decay_lr = ExponentialDecayLR(learning_rate, decay_rate, decay_steps)
        >>> exponential_decay_lr(global_step)
    """
    def __init__(self, learning_rate, decay_rate, decay_steps, is_stair=False):
        super(ExponentialDecayLR, self).__init__()
        _check_inputs(learning_rate, decay_rate, decay_steps, is_stair, self.cls_name)
        self.learning_rate = learning_rate
        self.decay_rate = decay_rate
        self.decay_steps = decay_steps
        self.is_stair = is_stair
        self.pow = P.Pow()
        self.cast = P.Cast()

    def construct(self, global_step):
        p = self.cast(global_step, mstype.float32) / self.decay_steps
        if self.is_stair:
            p = P.Floor()(p)
        return self.learning_rate * self.pow(self.decay_rate, p)
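

# Worked check for ExponentialDecayLR (illustrative arithmetic only, using the
# values from the docstring example): with learning_rate=0.1, decay_rate=0.9,
# decay_steps=4 and global_step=2,
#   p  = 2 / 4 = 0.5
#   lr = 0.1 * 0.9 ** 0.5 ≈ 0.09487
# With is_stair=True, p = floor(2 / 4) = 0 and the learning rate stays at 0.1.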


class NaturalExpDecayLR(LearningRateSchedule):
    r"""
    Calculates the learning rate based on a natural exponential decay function.

    For the i-th step, the formula for computing decayed_learning_rate[i] is:

    .. math::
        decayed\_learning\_rate[i] = learning\_rate * e^{-decay\_rate * p}

    Where:

    .. math::
        p = current\_step

    If `is_stair` is True, the formula is:

    .. math::
        p = floor(\frac{current\_step}{decay\_steps}) * decay\_steps

    Args:
        learning_rate (float): The initial value of the learning rate.
        decay_rate (float): The decay rate.
        decay_steps (int): A value used to calculate the decayed learning rate.
        is_stair (bool): If True, the learning rate is decayed once every `decay_steps` steps. Default: False.

    Inputs:
        Tensor. The current step number.

    Returns:
        Tensor. The learning rate value for the current step.

    Examples:
        >>> learning_rate = 0.1
        >>> decay_rate = 0.9
        >>> decay_steps = 4
        >>> global_step = Tensor(2, mstype.int32)
        >>> natural_exp_decay_lr = NaturalExpDecayLR(learning_rate, decay_rate, decay_steps, True)
        >>> natural_exp_decay_lr(global_step)
    """
    def __init__(self, learning_rate, decay_rate, decay_steps, is_stair=False):
        super(NaturalExpDecayLR, self).__init__()
        _check_inputs(learning_rate, decay_rate, decay_steps, is_stair, self.cls_name)
        self.learning_rate = learning_rate
        self.decay_rate = decay_rate
        self.decay_steps = decay_steps
        self.is_stair = is_stair
        self.math_e = math.e
        self.pow = P.Pow()
        self.cast = P.Cast()

    def construct(self, global_step):
        p = self.cast(global_step, mstype.float32)
        if self.is_stair:
            p = P.FloorDiv()(p, self.decay_steps) * self.decay_steps
        return self.learning_rate * self.pow(self.math_e, -self.decay_rate * p)
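

# Worked check for NaturalExpDecayLR (illustrative only; follows the construct
# above): with learning_rate=0.1, decay_rate=0.9, decay_steps=4, global_step=2
# and is_stair=True, p = floor(2 / 4) * 4 = 0, so lr = 0.1 * e**0 = 0.1.
# Without is_stair, p is the raw step count: lr = 0.1 * e**(-0.9 * 2) ≈ 0.01653.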


class InverseDecayLR(LearningRateSchedule):
    r"""
    Calculates the learning rate based on an inverse-time decay function.

    For the i-th step, the formula for computing decayed_learning_rate[i] is:

    .. math::
        decayed\_learning\_rate[i] = learning\_rate / (1 + decay\_rate * p)

    Where:

    .. math::
        p = \frac{current\_step}{decay\_steps}

    If `is_stair` is True, the formula is:

    .. math::
        p = floor(\frac{current\_step}{decay\_steps})

    Args:
        learning_rate (float): The initial value of the learning rate.
        decay_rate (float): The decay rate.
        decay_steps (int): A value used to calculate the decayed learning rate.
        is_stair (bool): If True, the learning rate is decayed once every `decay_steps` steps. Default: False.

    Inputs:
        Tensor. The current step number.

    Returns:
        Tensor. The learning rate value for the current step.

    Examples:
        >>> learning_rate = 0.1
        >>> decay_rate = 0.9
        >>> decay_steps = 4
        >>> global_step = Tensor(2, mstype.int32)
        >>> inverse_decay_lr = InverseDecayLR(learning_rate, decay_rate, decay_steps, True)
        >>> inverse_decay_lr(global_step)
    """
    def __init__(self, learning_rate, decay_rate, decay_steps, is_stair=False):
        super(InverseDecayLR, self).__init__()
        _check_inputs(learning_rate, decay_rate, decay_steps, is_stair, self.cls_name)
        self.learning_rate = learning_rate
        self.decay_rate = decay_rate
        self.decay_steps = decay_steps
        self.is_stair = is_stair
        self.cast = P.Cast()

    def construct(self, global_step):
        p = self.cast(global_step, mstype.float32) / self.decay_steps
        if self.is_stair:
            p = P.Floor()(p)
        return self.learning_rate / (1 + self.decay_rate * p)
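

# Worked check for InverseDecayLR (illustrative only): with learning_rate=0.1,
# decay_rate=0.9, decay_steps=4 and global_step=2, p = 2 / 4 = 0.5 and
# lr = 0.1 / (1 + 0.9 * 0.5) ≈ 0.06897; with is_stair=True, p = floor(0.5) = 0
# and the learning rate stays at 0.1.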


class CosineDecayLR(LearningRateSchedule):
    r"""
    Calculates the learning rate based on a cosine decay function.

    For the i-th step, the formula for computing decayed_learning_rate[i] is:

    .. math::
        decayed\_learning\_rate[i] = min\_lr + 0.5 * (max\_lr - min\_lr) *
        (1 + cos(\frac{current\_step}{decay\_steps}\pi))

    Args:
        min_lr (float): The minimum value of the learning rate.
        max_lr (float): The maximum value of the learning rate.
        decay_steps (int): A value used to calculate the decayed learning rate.

    Inputs:
        Tensor. The current step number.

    Returns:
        Tensor. The learning rate value for the current step.

    Examples:
        >>> min_lr = 0.01
        >>> max_lr = 0.1
        >>> decay_steps = 4
        >>> global_step = Tensor(2, mstype.int32)
        >>> cosine_decay_lr = CosineDecayLR(min_lr, max_lr, decay_steps)
        >>> cosine_decay_lr(global_step)
    """
    def __init__(self, min_lr, max_lr, decay_steps):
        super(CosineDecayLR, self).__init__()
        if not isinstance(min_lr, float):
            raise TypeError("min_lr must be float.")
        validator.check_non_negative_float(min_lr, "min_lr", self.cls_name)
        validator.check_positive_float(max_lr, 'max_lr', self.cls_name)
        validator.check_is_float(max_lr, 'max_lr', self.cls_name)
        validator.check_positive_int(decay_steps, "decay_steps", self.cls_name)
        if min_lr >= max_lr:
            raise ValueError('`max_lr` should be greater than `min_lr`.')
        self.min_lr = min_lr
        self.max_lr = max_lr
        self.decay_steps = decay_steps
        self.math_pi = math.pi
        self.delta = 0.5 * (max_lr - min_lr)
        self.cos = P.Cos()
        self.min = P.Minimum()
        self.cast = P.Cast()

    def construct(self, global_step):
        p = self.cast(self.min(global_step, self.decay_steps), mstype.float32)
        return self.min_lr + self.delta * (1.0 + self.cos(self.math_pi * p / self.decay_steps))
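

# Worked check for CosineDecayLR (illustrative only): with min_lr=0.01,
# max_lr=0.1, decay_steps=4 and global_step=2, p = min(2, 4) = 2 and
#   lr = 0.01 + 0.5 * (0.1 - 0.01) * (1 + cos(pi * 2 / 4))
#      = 0.01 + 0.045 * (1 + 0) = 0.055.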


class PolynomialDecayLR(LearningRateSchedule):
    r"""
    Calculates the learning rate based on a polynomial decay function.

    For the i-th step, the formula for computing decayed_learning_rate[i] is:

    .. math::
        decayed\_learning\_rate[i] = (learning\_rate - end\_learning\_rate) *
        (1 - tmp\_step / tmp\_decay\_steps)^{power} + end\_learning\_rate

    Where:

    .. math::
        tmp\_step = min(current\_step, decay\_steps)

    If `update_decay_steps` is True, the value of `tmp_decay_steps` is updated every `decay_steps` steps. The formula is:

    .. math::
        tmp\_decay\_steps = decay\_steps * ceil(current\_step / decay\_steps)

    Args:
        learning_rate (float): The initial value of the learning rate.
        end_learning_rate (float): The end value of the learning rate.
        decay_steps (int): A value used to calculate the decayed learning rate.
        power (float): A value used to calculate the decayed learning rate. This parameter must be greater than 0.
        update_decay_steps (bool): If True, the learning rate is decayed once every `decay_steps` steps. Default: False.

    Inputs:
        Tensor. The current step number.

    Returns:
        Tensor. The learning rate value for the current step.

    Examples:
        >>> learning_rate = 0.1
        >>> end_learning_rate = 0.01
        >>> decay_steps = 4
        >>> power = 0.5
        >>> global_step = Tensor(2, mstype.int32)
        >>> polynomial_decay_lr = PolynomialDecayLR(learning_rate, end_learning_rate, decay_steps, power)
        >>> polynomial_decay_lr(global_step)
    """
    def __init__(self, learning_rate, end_learning_rate, decay_steps, power, update_decay_steps=False):
        super(PolynomialDecayLR, self).__init__()
        validator.check_positive_float(learning_rate, 'learning_rate', self.cls_name)
        validator.check_is_float(learning_rate, 'learning_rate', self.cls_name)
        if not isinstance(end_learning_rate, float):
            raise TypeError("end_learning_rate must be float.")
        validator.check_non_negative_float(end_learning_rate, "end_learning_rate", self.cls_name)
        validator.check_positive_int(decay_steps, 'decay_steps', self.cls_name)
        validator.check_value_type('update_decay_steps', update_decay_steps, [bool], self.cls_name)
        validator.check_positive_float(power, 'power', self.cls_name)
        validator.check_is_float(power, 'power', self.cls_name)
        self.decay_steps = decay_steps
        self.start_learning_rate = learning_rate
        self.end_learning_rate = end_learning_rate
        self.diff_learning_rate = learning_rate - end_learning_rate
        self.power = power
        self.update_decay_steps = update_decay_steps
        self.pow = P.Pow()
        self.ceil = P.Ceil()
        self.min = P.Minimum()
        self.max = P.Maximum()

    def construct(self, global_step):
        tmp_global_step = P.Cast()(global_step, mstype.float32)
        tmp_decay_step = self.decay_steps
        if self.update_decay_steps:
            tmp_decay_step = tmp_decay_step * self.max(self.ceil(tmp_global_step / tmp_decay_step), 1)
        else:
            tmp_global_step = self.min(tmp_global_step, tmp_decay_step)
        p = tmp_global_step / tmp_decay_step
        lr = self.diff_learning_rate * self.pow(1.0 - p, self.power) + self.end_learning_rate
        return lr
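

# Worked check for PolynomialDecayLR (illustrative only): with
# learning_rate=0.1, end_learning_rate=0.01, decay_steps=4, power=0.5,
# global_step=2 and update_decay_steps=False, tmp_global_step = min(2, 4) = 2,
# p = 2 / 4 = 0.5 and lr = (0.1 - 0.01) * (1 - 0.5) ** 0.5 + 0.01 ≈ 0.07364.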


class WarmUpLR(LearningRateSchedule):
    r"""
    Gets the learning rate during warm-up.

    For the i-th step, the formula for computing warmup_learning_rate[i] is:

    .. math::
        warmup\_learning\_rate[i] = learning\_rate * tmp\_step / warmup\_steps

    Where:

    .. math::
        tmp\_step = min(current\_step, warmup\_steps)

    Args:
        learning_rate (float): The initial value of the learning rate.
        warmup_steps (int): The number of warm-up steps of the learning rate.

    Inputs:
        Tensor. The current step number.

    Returns:
        Tensor. The learning rate value for the current step.

    Examples:
        >>> learning_rate = 0.1
        >>> warmup_steps = 2
        >>> global_step = Tensor(2, mstype.int32)
        >>> warmup_lr = WarmUpLR(learning_rate, warmup_steps)
        >>> warmup_lr(global_step)
    """
    def __init__(self, learning_rate, warmup_steps):
        super(WarmUpLR, self).__init__()
        if not isinstance(learning_rate, float):
            raise TypeError("learning_rate must be float.")
        validator.check_non_negative_float(learning_rate, "learning_rate", self.cls_name)
        validator.check_positive_int(warmup_steps, 'warmup_steps', self.cls_name)
        self.warmup_steps = warmup_steps
        self.learning_rate = learning_rate
        self.min = P.Minimum()
        self.cast = P.Cast()

    def construct(self, global_step):
        warmup_percent = self.cast(self.min(global_step, self.warmup_steps), mstype.float32) / self.warmup_steps
        return self.learning_rate * warmup_percent
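

# Worked check for WarmUpLR (illustrative only): with learning_rate=0.1,
# warmup_steps=2 and global_step=2, warmup_percent = min(2, 2) / 2 = 1.0, so
# the schedule returns the full rate 0.1; at global_step=1 it would return 0.05.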


__all__ = [
    'ExponentialDecayLR',
    'NaturalExpDecayLR',
    'InverseDecayLR',
    'CosineDecayLR',
    'PolynomialDecayLR',
    'WarmUpLR'
]
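

# Typical usage (a minimal sketch, not part of this module): a
# LearningRateSchedule cell can be passed as the `learning_rate` argument of a
# MindSpore optimizer, which then evaluates the schedule against the current
# step during training. Assuming the standard `mindspore.nn` entry points,
# where `net` is a hypothetical nn.Cell with trainable parameters:
#
#     import mindspore.nn as nn
#     schedule = nn.ExponentialDecayLR(0.1, 0.9, decay_steps=4)
#     optimizer = nn.Momentum(net.trainable_params(),
#                             learning_rate=schedule, momentum=0.9)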