test_pynative_model.py
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test_pynative_model """
import numpy as np

import mindspore.nn as nn
from mindspore import Parameter, ParameterTuple, Tensor
from mindspore import context
from mindspore.nn.optim import Momentum
from mindspore.ops import composite as C
from mindspore.ops import operations as P

from ..ut_filter import non_graph_engine


def setup_module(module):
    context.set_context(mode=context.PYNATIVE_MODE)


class GradWrap(nn.Cell):
    """ GradWrap definition """

    def __init__(self, network):
        super(GradWrap, self).__init__()
        self.network = network
        self.weights = ParameterTuple(network.get_parameters())

    def construct(self, x, label):
        weights = self.weights
        return C.grad_by_list(self.network, weights)(x, label)

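
# Note: C.grad_by_list is the composite gradient helper used in early MindSpore
# releases. Calling C.grad_by_list(self.network, weights)(x, label) is expected
# to return one gradient tensor per Parameter in `weights`, in the same order
# as the ParameterTuple (later releases typically express this with
# ops.GradOperation(get_by_list=True)).
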

@non_graph_engine
def test_softmaxloss_grad():
    """ test_softmaxloss_grad """

    class NetWithLossClass(nn.Cell):
        """ NetWithLossClass definition """

        def __init__(self, network):
            super(NetWithLossClass, self).__init__()
            self.loss = nn.SoftmaxCrossEntropyWithLogits()
            self.network = network

        def construct(self, x, label):
            predict = self.network(x)
            return self.loss(predict, label)

    class Net(nn.Cell):
        """ Net definition """

        def __init__(self):
            super(Net, self).__init__()
            self.weight = Parameter(Tensor(np.ones([64, 10]).astype(np.float32)), name="weight")
            self.bias = Parameter(Tensor(np.ones([10]).astype(np.float32)), name="bias")
            self.fc = P.MatMul()
            self.biasAdd = P.BiasAdd()

        def construct(self, x):
            x = self.biasAdd(self.fc(x, self.weight), self.bias)
            return x

    net = GradWrap(NetWithLossClass(Net()))

    predict = Tensor(np.ones([1, 64]).astype(np.float32))
    label = Tensor(np.zeros([1, 10]).astype(np.float32))
    print("pynative run")
    out = net.construct(predict, label)
    print("out:", out)
    print(out[0], (out[0]).asnumpy(), ":result")

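
# `out` above is the gradient tuple produced by GradWrap: one entry per
# Parameter of the wrapped net, so out[0] should correspond to the first
# Parameter in the ParameterTuple (presumably `weight`, with gradient shape [64, 10]).
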

@non_graph_engine
def test_lenet_grad():
    """ test_lenet_grad """

    class NetWithLossClass(nn.Cell):
        """ NetWithLossClass definition """

        def __init__(self, network):
            super(NetWithLossClass, self).__init__()
            self.loss = nn.SoftmaxCrossEntropyWithLogits()
            self.network = network

        def construct(self, x, label):
            predict = self.network(x)
            return self.loss(predict, label)

    class LeNet5(nn.Cell):
        """ LeNet5 definition """

        def __init__(self):
            super(LeNet5, self).__init__()
            self.conv1 = nn.Conv2d(1, 6, 5, pad_mode='valid')
            self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')
            self.fc1 = nn.Dense(16 * 5 * 5, 120)
            self.fc2 = nn.Dense(120, 84)
            self.fc3 = nn.Dense(84, 10)
            self.relu = nn.ReLU()
            self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
            self.flatten = P.Flatten()

        def construct(self, x):
            x = self.max_pool2d(self.relu(self.conv1(x)))
            x = self.max_pool2d(self.relu(self.conv2(x)))
            x = self.flatten(x)
            x = self.relu(self.fc1(x))
            x = self.relu(self.fc2(x))
            x = self.fc3(x)
            return x

    input_data = Tensor(np.ones([1, 1, 32, 32]).astype(np.float32) * 0.01)
    label = Tensor(np.ones([1, 10]).astype(np.float32))
    iteration_num = 1
    verification_step = 0

    net = LeNet5()
    loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False)
    momen_opti = Momentum(net.trainable_params(), learning_rate=0.1, momentum=0.9)
    train_net = GradWrap(NetWithLossClass(net))
    train_net.set_train()

    for i in range(0, iteration_num):
        # get the gradients
        grads = train_net(input_data, label)
        # update parameters
        success = momen_opti(grads)
        if success is False:
            print("fail to run optimizer")
        # verification
        if i == verification_step:
            fw_output = net(input_data)
            loss_output = loss(fw_output, label)
            print("The loss of %s-th iteration is %s" % (i, loss_output.asnumpy()))