You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

test_gnn_aggregator.py 2.8 kB

5 years ago
5 years ago
5 years ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. """test gnn aggregator."""
  16. import numpy as np
  17. from aggregator import MeanAggregator, AttentionHead, AttentionAggregator
  18. import mindspore.context as context
  19. import mindspore.nn as nn
  20. import mindspore.ops.composite as C
  21. from mindspore import Tensor
  22. from mindspore.common.api import _executor
# Compile in graph mode so the _executor.compile calls below build static graphs.
context.set_context(mode=context.GRAPH_MODE)
# Gradient operator: get_all=True returns gradients w.r.t. every network input;
# sens_param=True means the caller passes the output sensitivity explicitly.
grad_all_with_sens = C.GradOperation(get_all=True, sens_param=True)
  25. class MeanAggregatorGrad(nn.Cell):
  26. """Backward of MeanAggregator"""
  27. def __init__(self, network):
  28. super(MeanAggregatorGrad, self).__init__()
  29. self.grad_op = grad_all_with_sens
  30. self.network = network
  31. def construct(self, x, sens):
  32. grad_op = self.grad_op(self.network)(x, sens)
  33. return grad_op
  34. def test_MeanAggregator():
  35. """Compile MeanAggregator forward graph"""
  36. aggregator = MeanAggregator(32, 64, activation="relu", dropout_ratio=0.5)
  37. input_data = Tensor(np.array(np.random.rand(32, 3, 32), dtype=np.float32))
  38. _executor.compile(aggregator, input_data)
  39. def test_MeanAggregator_grad():
  40. """Compile MeanAggregator backward graph"""
  41. aggregator = MeanAggregator(32, 64, activation="relu", dropout_ratio=0.5)
  42. input_data = Tensor(np.array(np.random.rand(32, 3, 32), dtype=np.float32))
  43. sens = Tensor(np.ones([32, 64]).astype(np.float32))
  44. grad_op = MeanAggregatorGrad(aggregator)
  45. _executor.compile(grad_op, input_data, sens)
  46. def test_AttentionHead():
  47. """Compile AttentionHead forward graph"""
  48. head = AttentionHead(1433,
  49. 8,
  50. in_drop_ratio=0.6,
  51. coef_drop_ratio=0.6,
  52. residual=False)
  53. input_data = Tensor(np.array(np.random.rand(1, 2708, 1433), dtype=np.float32))
  54. biases = Tensor(np.array(np.random.rand(1, 2708, 2708), dtype=np.float32))
  55. _executor.compile(head, input_data, biases)
  56. def test_AttentionAggregator():
  57. input_data = Tensor(np.array(np.random.rand(1, 2708, 1433), dtype=np.float32))
  58. biases = Tensor(np.array(np.random.rand(1, 2708, 2708), dtype=np.float32))
  59. net = AttentionAggregator(1433, 8, 8)
  60. _executor.compile(net, input_data, biases)