
grad.cpp 4.4 kB

/**
 * \file imperative/python/src/grad.cpp
 * MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
 *
 * Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 */
#include "./grad.h"

#include "megbrain/imperative/backward_graph_opt.h"
#include "megbrain/imperative/ops/autogen.h"
#include "megbrain/imperative/proxy_graph_detail.h"
#include "megbrain/utils/mempool.h"
#include "range/v3/all.hpp"

#include "./transformation.h"

namespace py = pybind11;
namespace views = ranges::views;

namespace mgb::imperative::python {

namespace {
std::unordered_map<std::shared_ptr<GradKey>, GradKeyWrapper*> grad_key_map;
}  // namespace
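
// Every GradKeyWrapper registers itself in grad_key_map, keyed by its GradKey,
// so GradKeyWrapper::get() can recover the Python-side wrapper from a key.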
GradKeyWrapper::GradKeyWrapper() : m_key(std::make_shared<GradKey>()) {
    grad_key_map[m_key] = this;
}
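
// attach(tensor, callback): marks a tensor as tracked by this grad key. The
// optional Python callback is wrapped in a GenericFunction and, when invoked
// during backward, receives the gradient as a Python Tensor.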
void GradKeyWrapper::attach(PyObject* const* args, size_t nargs) {
    if (nargs != 2) {
        throw py::type_error("expect 2 arguments");
    }
    auto* tw = TensorWrapper::try_cast(args[0]);
    if (!tw) {
        throw py::type_error("argument 1 must be Tensor");
    }
    py::object callback;
    if (args[1] != Py_None) {
        callback = py::reinterpret_borrow<py::object>(args[1]);
    }
    GenericFunction generic_callback =
            [=](Span<ValueRef> inputs) -> std::vector<ValueRef> {
        mgb_assert(inputs.size() == 1);
        if (callback) {
            callback(TensorWrapper::make(py_tensor_type, inputs[0]));
        }
        return {};
    };
    tw->m_tensor->reset(imperative::apply(
            AttachGrad(m_key), tw->m_tensor->data(),
            FunctionValue::make(generic_callback))[0]);
}
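
// backward(tensors, grads): runs backpropagation under this key, pairing each
// output tensor with its gradient; both lists are flattened into one span.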
void GradKeyWrapper::backward(GradKeyWrapper* self, py::list tensors, py::list grads) {
    std::vector<ValueRef> args;
    mgb_assert(tensors.size() == grads.size());
    for (auto&& tensor : tensors) {
        args.push_back(TensorWrapper::try_cast(tensor.ptr())->m_tensor->data());
    }
    for (auto&& grad : grads) {
        args.push_back(TensorWrapper::try_cast(grad.ptr())->m_tensor->data());
    }
    imperative::apply(GradBackward(self->m_key), {args.data(), args.size()});
}
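
// get_backward_closure(tensors): obtains a backward closure over the given
// tensors and wraps it as a Python callable that accepts the matching
// gradient tensors.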
pybind11::function GradKeyWrapper::get_backward_closure(
        GradKeyWrapper* self, py::list tensors) {
    std::vector<ValueRef> args;
    for (auto&& tensor : tensors) {
        args.push_back(TensorWrapper::try_cast(tensor.ptr())->m_tensor->data());
    }
    auto closure = imperative::apply(GetBackwardColsure(self->m_key), args)[0]
                           .as<FunctionValue>();
    auto py_function = [closure](std::vector<TensorWrapper*> tensors) {
        std::vector<ValueRef> args;
        for (auto* tw : tensors) {
            args.push_back(tw->m_tensor->data());
        }
        (*closure)(args);
    };
    return pybind11::cpp_function(py_function);
}
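
// Getter and setter for the grad key's name.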
PyObject* GradKeyWrapper::get_name() {
    return py::cast(m_key->name()).release().ptr();
}

void GradKeyWrapper::set_name(py::handle name) {
    m_key->name(py::cast<std::string>(name));
}
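
// is_attached_to(tensor): returns True if the tensor is attached to this grad
// key, False otherwise.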
PyObject* GradKeyWrapper::is_attached_to(PyObject* const* args, size_t nargs) {
    if (nargs != 1) {
        PyErr_SetString(PyExc_TypeError, "expect 1 argument");
        return nullptr;
    }
    auto* tw = TensorWrapper::try_cast(args[0]);
    if (!tw) {
        PyErr_SetString(PyExc_TypeError, "expect Tensor");
        return nullptr;
    }
    if (imperative::apply(IsAttachedTo(m_key), tw->m_tensor->data())[0]
                .cast<BoolValue>()) {
        Py_RETURN_TRUE;
    }
    Py_RETURN_FALSE;
}
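
// enter()/exit(): install and remove the GradTransformation for this key on
// the global transformation stack managed by TransformationManager.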
void GradKeyWrapper::enter() {
    m_transformation = std::make_shared<GradTransformation>(m_key);
    TransformationManager::get_instance().register_at<TransformationManager::Grad>(
            m_transformation);
}

void GradKeyWrapper::exit() {
    TransformationManager::get_instance().unregister<TransformationManager::Grad>(
            m_transformation);
    m_transformation.reset();
}
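
// suppress()/resume(): temporarily pause and re-enable the grad transformation
// without unregistering it from the transformation stack.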
void GradKeyWrapper::suppress() {
    m_transformation->suppress();
}

void GradKeyWrapper::resume() {
    m_transformation->resume();
}
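
// Look up the wrapper registered for a GradKey; the destructor removes the
// entry so the registry never holds a dangling pointer.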
GradKeyWrapper* GradKeyWrapper::get(std::shared_ptr<GradKey> key) {
    return grad_key_map.at(key);
}

GradKeyWrapper::~GradKeyWrapper() {
    grad_key_map.erase(m_key);
}

}  // namespace mgb::imperative::python