You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

test_adafactor.py 3.6 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103
# Copyright 2021-2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
  14. import numpy as np
  15. import mindspore as ms
  16. from mindspore import context, Tensor, Parameter
  17. from mindspore.common.api import _cell_graph_executor
  18. from mindspore.nn import Cell, TrainOneStepCell
  19. from mindspore.nn.optim.adafactor import AdaFactor
  20. from mindspore.ops import operations as P
  21. class Net(Cell):
  22. def __init__(self, add_weight, matmul_weight, bias, strategy1=None, strategy2=None):
  23. super().__init__()
  24. self.add = P.TensorAdd()
  25. self.matmul = P.MatMul().shard(strategy1)
  26. self.add2 = P.TensorAdd().shard(strategy2)
  27. self.add_weight = Parameter(add_weight, "w1")
  28. self.mul_weight = Parameter(matmul_weight, "w2")
  29. self.bias = Parameter(bias, "bias")
  30. self.reshape = P.Reshape()
  31. def construct(self, x, b):
  32. out = self.add(x, self.add_weight)
  33. out = self.reshape(out, (64, 32))
  34. out = self.matmul(out, self.mul_weight)
  35. out = self.add2(out, self.bias)
  36. return out
  37. _x = Tensor(np.ones([64, 16, 2]), dtype=ms.float32)
  38. _w0 = Tensor(np.ones([64, 16, 2]), dtype=ms.float32)
  39. _w1 = Tensor(np.ones([32, 32]), dtype=ms.float32)
  40. _w2 = Tensor(np.ones([1, 32]), dtype=ms.float32)
  41. _b = Tensor(np.ones([64, 16, 2]), dtype=ms.float32)
  42. def compile_net(net):
  43. scale_parameter = False
  44. relative_step = True
  45. warmup_init = True
  46. compression = True
  47. optimizer = AdaFactor(net.trainable_params(), learning_rate=None, weight_decay=0.9,
  48. scale_parameter=scale_parameter, relative_step=relative_step,
  49. warmup_init=warmup_init, compression=compression)
  50. train_net = TrainOneStepCell(net, optimizer)
  51. train_net.set_auto_parallel()
  52. train_net.set_train()
  53. _cell_graph_executor.compile(train_net, _x, _b)
  54. context.reset_auto_parallel_context()
  55. def test_opt_data_parallel():
  56. """
  57. Feature: test adafactor data parallel
  58. Description:
  59. Expectation: compile success
  60. """
  61. context.set_auto_parallel_context(parallel_mode="semi_auto_parallel", device_num=16, global_rank=0)
  62. strategy1 = ((16, 1), (1, 1))
  63. strategy2 = ((16, 1), (1, 1))
  64. net = Net(_w0, _w1, _w2, strategy1, strategy2)
  65. compile_net(net)
  66. def test_opt_model_parallel():
  67. """
  68. Feature: test adafactor model parallel
  69. Description:
  70. Expectation: compile success
  71. """
  72. context.set_auto_parallel_context(parallel_mode="semi_auto_parallel", device_num=16, global_rank=0)
  73. strategy1 = ((4, 2), (2, 2))
  74. strategy2 = ((4, 2), (1, 2))
  75. net = Net(_w0, _w1, _w2, strategy1, strategy2)
  76. compile_net(net)
  77. def test_opt_shard():
  78. """
  79. Feature: test adafactor optimizer parallel
  80. Description: only shard batch dimension
  81. Expectation: compile success
  82. """
  83. context.set_auto_parallel_context(parallel_mode="semi_auto_parallel", device_num=16, global_rank=0,
  84. enable_parallel_optimizer=True)
  85. strategy1 = ((4, 2), (2, 2))
  86. strategy2 = ((4, 2), (1, 2))
  87. net = Net(_w0, _w1, _w2, strategy1, strategy2)
  88. compile_net(net)