You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

grad_sparse.py 3.1 kB

1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859606162636465666768697071727374757677787980
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. """bprop primitives"""
  16. from .. import functional as F
  17. from .. import operations as P
  18. from ..composite.multitype_ops.zeros_like_impl import zeros_like
  19. from .grad_base import bprops, bprop_getters
  20. # Unused parameters are placeholders.
  21. @bprops.register("MakeSparseTensor")
  22. def bprop_make_sparse_tensor(indices, values, dense_shape, out, dout):
  23. """Backpropagator for primitive `MakeSparseTensor`."""
  24. return zeros_like(indices), F.sparse_tensor_get_values(dout), ()
  25. @bprops.register("SparseTensorGetIndices")
  26. def bprop_sparse_tensor_get_indices(sparse_tensor, out, dout):
  27. """Backpropagator for primitive `SparseTensorGetIndices`."""
  28. return (zeros_like(sparse_tensor),)
  29. @bprops.register("SparseTensorGetValues")
  30. def bprop_sparse_tensor_get_values(sparse_tensor, out, dout):
  31. """Backpropagator for primitive `SparseTensorGetValues`."""
  32. return F.make_sparse_tensor(F.sparse_tensor_get_indices(sparse_tensor),
  33. dout,
  34. F.sparse_tensor_get_dense_shape(sparse_tensor))
  35. @bprops.register("SparseTensorGetDenseShape")
  36. def bprop_sparse_tensor_get_dense_shape(sparse_tensor, out, dout):
  37. """Backpropagator for primitive `SparseTensorGetDenseShape`."""
  38. return (zeros_like(sparse_tensor),)
  39. @bprop_getters.register(P.SparseToDense)
  40. def get_bprop_sparse_to_dense(self):
  41. """Generate bprop for SparseToDense"""
  42. def bprop(indices, values, dense_shape, out, dout):
  43. return zeros_like(indices), F.gather_nd(dout, indices), zeros_like(dense_shape)
  44. return bprop
  45. @bprop_getters.register(P.SparseTensorDenseMatmul)
  46. def get_bprop_sparse_tensor_dense_matmul(self):
  47. """Generate bprop for SparseTensorDenseMatmul"""
  48. adj_s = self.adjoint_st
  49. adj_d = self.adjoint_dt
  50. sparse_tensor_dense_mat_mul = P.SparseTensorDenseMatmul(not adj_s)
  51. def bprop(indices, values, dense_shape, dense, out, dout):
  52. dense_grad = sparse_tensor_dense_mat_mul(indices, values, dense_shape, dout)
  53. perm = (1, 0)
  54. if adj_d:
  55. dense_grad = F.transpose(dense_grad, perm)
  56. rows = indices[:, 0]
  57. cols = indices[:, 1]
  58. parts_a = F.gather(dout, cols if adj_s else rows, 0)
  59. parts_b = F.gather(F.transpose(dense, perm) if adj_d else dense, rows if adj_s else cols, 0)
  60. values_grad = F.reduce_sum(parts_a * parts_b, 1)
  61. return zeros_like(indices), values_grad, zeros_like(dense_shape), dense_grad
  62. return bprop