You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

test_var_grad.py 11 kB

5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. import numpy as np
  16. import mindspore.ops.composite as C
  17. from mindspore import Tensor, Parameter
  18. from mindspore import context
  19. from mindspore.common import dtype as mstype
  20. from mindspore.common.parameter import ParameterTuple
  21. from mindspore.nn import Cell
  22. from mindspore.ops import operations as P
  23. context.set_context(mode=context.GRAPH_MODE)
  24. def test_net_vargs_expand():
  25. class AddNet(Cell):
  26. def __init__(self):
  27. super(AddNet, self).__init__()
  28. self.w = Parameter(
  29. Tensor(np.ones((3, 4, 5), np.float32)), "w2", requires_grad=True)
  30. def construct(self, x, y):
  31. return x + y
  32. x = Tensor(np.random.normal(0, 1, [3, 4, 5]).astype(np.float32))
  33. y = Tensor(np.random.normal(0, 1, [3, 4, 5]).astype(np.float32))
  34. sens = Tensor(np.random.normal(0, 1, [3, 4, 5]).astype(np.float32))
  35. net = AddNet()
  36. _ = C.grad_all_with_sens(net, net.trainable_params())(x, y, sens)
  37. class VarNet(Cell):
  38. def __init__(self, net):
  39. super(VarNet, self).__init__()
  40. self.b = Parameter(
  41. Tensor(np.ones([3, 4, 5]), dtype=mstype.float32), "b", requires_grad=True)
  42. self.w = Parameter(
  43. Tensor(np.ones([3, 4, 5]), dtype=mstype.float32), "w", requires_grad=True)
  44. self.net = net
  45. def construct(self, *args):
  46. return self.net(*args) * self.w + self.b
  47. class SecondNet(Cell):
  48. def __init__(self):
  49. super(SecondNet, self).__init__()
  50. self.b2 = Parameter(
  51. Tensor(np.ones([3, 4, 5]), dtype=mstype.float32), "b2", requires_grad=True)
  52. def construct(self, *args):
  53. res = args[0] + args[1]
  54. return res + self.b2
  55. class Bprop(Cell):
  56. def __init__(self, func, wrt_params, params, grad_op, sens=None):
  57. super(Bprop, self).__init__(auto_prefix=False)
  58. self.func = func
  59. self.wrt_params = wrt_params
  60. self.params = None
  61. if self.wrt_params and params:
  62. self.params = ParameterTuple(params)
  63. self.grad = grad_op
  64. self.with_sens = False
  65. self.sens = sens
  66. if sens:
  67. self.sens = Tensor(sens, dtype=mstype.float32)
  68. self.with_sens = True
  69. def construct(self, *inputs):
  70. # pylint: disable=no-else-return
  71. if self.wrt_params:
  72. if self.with_sens:
  73. return self.grad(self.func, self.params)(*inputs, self.sens)
  74. else:
  75. return self.grad(self.func, self.params)(*inputs)
  76. elif self.with_sens:
  77. return self.grad(self.func)(*inputs, self.sens)
  78. else:
  79. return self.grad(self.func)(*inputs)
  80. def test_all_var_args_grad_with_sens():
  81. """"test grad_by_list_with_sens with all var args input"""
  82. class GradNet(Cell):
  83. def __init__(self, net):
  84. super(GradNet, self).__init__()
  85. self.weights = ParameterTuple(net.trainable_params())
  86. self.net = net
  87. def construct(self, *inputs):
  88. return C.grad_by_list_with_sens(self.net, self.weights)(*inputs)
  89. x = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  90. y = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  91. sens = Tensor(1.0, dtype=mstype.float32)
  92. net = VarNet(SecondNet())
  93. grad_net = GradNet(net)
  94. _ = grad_net(x, y, sens)
  95. def test_grad_list_var_args():
  96. class GradNet(Cell):
  97. def __init__(self, net):
  98. super(GradNet, self).__init__()
  99. self.weights = ParameterTuple(net.trainable_params())
  100. self.net = net
  101. def construct(self, *inputs):
  102. return C.grad_by_list(self.net, self.weights)(*inputs)
  103. x = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  104. y = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  105. net = VarNet(SecondNet())
  106. grad_net = GradNet(net)
  107. _ = grad_net(x, y)
  108. def test_grad_all_var_args():
  109. class GradNet(Cell):
  110. def __init__(self, net):
  111. super(GradNet, self).__init__()
  112. self.weights = ParameterTuple(net.trainable_params())
  113. self.net = net
  114. def construct(self, *inputs):
  115. return C.grad_all(self.net)(*inputs)
  116. x = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  117. y = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  118. net = VarNet(SecondNet())
  119. grad_net = GradNet(net)
  120. _ = grad_net(x, y)
  121. def test_grad_all_var_args_with_sens():
  122. class GradNet(Cell):
  123. def __init__(self, net):
  124. super(GradNet, self).__init__()
  125. self.weights = ParameterTuple(net.trainable_params())
  126. self.net = net
  127. def construct(self, *inputs):
  128. return C.grad_all_with_sens(self.net)(*inputs)
  129. x = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  130. y = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  131. sens = Tensor(1.0, dtype=mstype.float32)
  132. net = VarNet(SecondNet())
  133. grad_net = GradNet(net)
  134. _ = grad_net(x, y, sens)
  135. def test_grad_var_args_with_sens():
  136. class GradNet(Cell):
  137. def __init__(self, net):
  138. super(GradNet, self).__init__()
  139. self.weights = ParameterTuple(net.trainable_params())
  140. self.net = net
  141. def construct(self, *inputs):
  142. return C.grad_with_sens(self.net)(*inputs)
  143. x = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  144. y = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  145. sens = Tensor(1.0, dtype=mstype.float32)
  146. net = VarNet(SecondNet())
  147. grad_net = GradNet(net)
  148. _ = grad_net(x, y, sens)
  149. def test_var_args_grad():
  150. class VarNet(Cell):
  151. def __init__(self, net):
  152. super(VarNet, self).__init__()
  153. self.b = Parameter(
  154. Tensor(np.ones([3, 4, 5]), dtype=mstype.float32), "b", requires_grad=True)
  155. self.net = net
  156. def construct(self, *args):
  157. return self.net(*args) + self.b
  158. class SecondNet(Cell):
  159. def __init__(self):
  160. super(SecondNet, self).__init__()
  161. self.b2 = Parameter(
  162. Tensor(np.ones([3, 4, 5]), dtype=mstype.float32), "b2", requires_grad=True)
  163. def construct(self, *args):
  164. res = args[0] + args[1]
  165. return res + self.b2
  166. class GradNet(Cell):
  167. def __init__(self, net):
  168. super(GradNet, self).__init__()
  169. self.net = net
  170. self.weights = ParameterTuple(net.trainable_params())
  171. def construct(self, x, y, sens):
  172. return C.grad_by_list_with_sens(self.net, self.weights)(x, y, sens)
  173. x = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  174. y = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  175. sens = Tensor(1.0, dtype=mstype.float32)
  176. net = VarNet(SecondNet())
  177. grad_net = GradNet(net)
  178. _ = grad_net(x, y, sens)
  179. def test_var_args_positional():
  180. """"test grad_all with var args in inner graph"""
  181. class VarNet(Cell):
  182. def __init__(self, net):
  183. super(VarNet, self).__init__()
  184. self.net = net
  185. def construct(self, x, y):
  186. return self.net(x, y) * x
  187. class SecondNet(Cell):
  188. def __init__(self):
  189. super(SecondNet, self).__init__()
  190. def construct(self, *args):
  191. return args[0] + args[1]
  192. class GradNet(Cell):
  193. def __init__(self, net):
  194. super(GradNet, self).__init__()
  195. self.net = net
  196. self.weights = ParameterTuple(net.trainable_params())
  197. def construct(self, x, y):
  198. return C.grad_all(self.net)(x, y)
  199. x = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  200. y = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  201. net = VarNet(SecondNet())
  202. grad_net = GradNet(net)
  203. _ = grad_net(x, y)
  204. def test_grad_within_if_else():
  205. class GradNet(Cell):
  206. def __init__(self, net):
  207. super(GradNet, self).__init__()
  208. self.weights = ParameterTuple(net.trainable_params())
  209. self.net = net
  210. grad_op = C.GradOperation(
  211. name='grad', get_all=False, get_by_list=True, sens_param=True)
  212. self.grad = Bprop(self.net, True, self.weights, grad_op, 1.0)
  213. def construct(self, *inputs):
  214. return self.grad(*inputs)
  215. x = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  216. y = Tensor(np.ones([3, 4, 5]), dtype=mstype.float32)
  217. _ = Tensor(1.0, dtype=mstype.float32)
  218. net = VarNet(SecondNet())
  219. grad_net = GradNet(net)
  220. out = grad_net(x, y)
  221. print("test_grad_var_args_with_sens out=", out)
  222. def test_grad_for_concat():
  223. class GradNet(Cell):
  224. def __init__(self, net):
  225. super(GradNet, self).__init__()
  226. self.weights = ParameterTuple(net.trainable_params())
  227. self.net = net
  228. grad_op = C.GradOperation(
  229. name='grad', get_all=True, get_by_list=False, sens_param=True)
  230. self.grad = Bprop(self.net, False, self.weights, grad_op)
  231. def construct(self, *inputs):
  232. return self.grad(*inputs)
  233. class Concat(Cell):
  234. def __init__(self, axis):
  235. super().__init__()
  236. self.concat = P.Concat(axis=axis)
  237. def construct(self, *input1):
  238. return self.concat(input1)
  239. class ConcatFactory:
  240. def __init__(self, input_shape, axis, dtype=np.float32):
  241. super(ConcatFactory, self).__init__()
  242. self.inputs_np = []
  243. for s in input_shape:
  244. self.inputs_np.append(np.random.randn(*s).astype(dtype))
  245. self.axis = axis
  246. self.out_numpy = np.concatenate(self.inputs_np, axis=self.axis)
  247. self.out_grad_np = self.out_numpy
  248. def grad_mindspore_impl(self):
  249. inputs = []
  250. for i in self.inputs_np:
  251. inputs.append(Tensor(i))
  252. net = Concat(axis=self.axis)
  253. grad_net = GradNet(net)
  254. grad_net.set_train()
  255. _ = grad_net(*inputs, Tensor(self.out_grad_np))
  256. def grad_cmp(self):
  257. self.grad_mindspore_impl()
  258. fact = ConcatFactory(input_shape=(
  259. (2, 184320, 1), (2, 46080, 1), (2, 11520, 1), (2, 2880, 1), (2, 720, 1)), axis=1)
  260. fact.grad_cmp()