You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

relu_ad.py 1.8 kB

5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960
  1. #!/usr/bin/env python3
  2. # coding: utf-8
  3. # Copyright 2019 Huawei Technologies Co., Ltd
  4. #
  5. # Licensed under the Apache License, Version 2.0 (the "License");
  6. # you may not use this file except in compliance with the License.
  7. # You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. """operator dsl function: relu_ad"""
  17. import akg
  18. import akg.tvm
  19. from akg.utils import custom_tiling as ct_util
  20. from akg.utils import validation_check as vc_util
  21. from akg.ops.nn import relu
  22. from akg.dim import DIM
  23. relu_ad_set_dim_map = {
  24. }
  25. def relu_ad_set_dim_func(head, a):
  26. """set dim info"""
  27. key = []
  28. key.append(tuple(a.shape))
  29. key.append(a.dtype)
  30. hash_key = str(tuple(key))
  31. if hash_key in relu_ad_set_dim_map.keys():
  32. return ct_util.set_dims(relu_ad_set_dim_map[hash_key]), hash_key
  33. return "", hash_key
  34. @ct_util.reg_set_dim_func(relu_ad_set_dim_func)
  35. @vc_util.check_input_type(akg.tvm.tensor.Tensor, akg.tvm.tensor.Tensor)
  36. def relu_ad(head, a):
  37. """
  38. Compute gradient of relu operator using automatic differentiate.
  39. Args:
  40. head (tvm.tensor.Tensor): Tensor of type float16, float32, int8, uint8, int32.
  41. a (tvm.tensor.Tensor): Tensor of type float16, float32, int8, uint8, int32.
  42. Returns:
  43. tvm.tensor.Tensor with the same shape as input.
  44. """
  45. dim_info, _ = relu_ad_set_dim_func(head, a)
  46. attrs = {DIM: dim_info}
  47. b = relu.relu(a)
  48. jacs = list(akg.differentiate(b, [a], head))
  49. return jacs[0], attrs