You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

session.cc 6.1 kB

5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include <memory>
  17. #include <algorithm>
  18. #include "include/inference.h"
  19. #include "session/session.h"
  20. #include "utils/load_onnx/anf_converter.h"
  21. #include "session/session_basic.h"
  22. #include "session/session_factory.h"
  23. #include "utils/base_ref_utils.h"
  24. #include "kernel/oplib/oplib.h"
  25. #ifdef ENABLE_D
  26. #include "utils/context/ms_context.h"
  27. #include "session/ascend_session.h"
  28. #else
  29. #include "session/cpu_session.h"
  30. #endif
  31. namespace py = pybind11;
  32. namespace mindspore::inference {
  33. std::shared_ptr<FuncGraph> LoadModel(const char *model_buf, size_t size, const std::string &device) {
  34. inference::Session::RegAllOp();
  35. auto anf_graph = lite::AnfConverter::RunAnfConverter(model_buf, size);
  36. return anf_graph;
  37. }
  38. void ExitInference() {
  39. auto ms_context = MsContext::GetInstance();
  40. if (ms_context == nullptr) {
  41. MS_LOG(ERROR) << "Get Context failed!";
  42. return;
  43. }
  44. if (!ms_context->CloseTsd()) {
  45. MS_LOG(ERROR) << "Inference CloseTsd failed!";
  46. return;
  47. }
  48. }
  49. std::shared_ptr<MSSession> MSSession::CreateSession(const std::string &device, uint32_t device_id) {
  50. auto session = std::make_shared<inference::Session>();
  51. auto ret = session->Init(device, device_id);
  52. if (ret != 0) {
  53. return nullptr;
  54. }
  55. return session;
  56. }
  57. void Session::RegAllOp() {
  58. static std::mutex init_mutex;
  59. static bool Initialized = false;
  60. std::lock_guard<std::mutex> lock(init_mutex);
  61. if (Initialized) {
  62. return;
  63. }
  64. Initialized = true;
  65. MsContext::GetInstance()->set_execution_mode(kGraphMode);
  66. Py_Initialize();
  67. auto c_expression = PyImport_ImportModule("mindspore._c_expression");
  68. if (c_expression == nullptr) {
  69. MS_LOG(EXCEPTION) << "Failed to import mindspore._c_expression module.";
  70. return;
  71. }
  72. PyObject *c_expression_dict = PyModule_GetDict(c_expression);
  73. PyObject *op_info_loader_class = PyDict_GetItemString(c_expression_dict, "OpInfoLoaderPy");
  74. if (op_info_loader_class == nullptr) {
  75. MS_LOG(EXCEPTION) << "Failed to get op_info_loader_class from mindspore._c_expression.";
  76. return;
  77. }
  78. PyObject *op_info_loader = PyInstanceMethod_New(op_info_loader_class);
  79. if (op_info_loader == nullptr) {
  80. MS_LOG(EXCEPTION) << "Failed to create op_info_loader instance.";
  81. return;
  82. }
  83. PyObject *op_info_loader_ins = PyObject_CallObject(op_info_loader, nullptr);
  84. if (op_info_loader_ins == nullptr) {
  85. MS_LOG(EXCEPTION) << "Failed to call op_info_loader instance.";
  86. return;
  87. }
  88. auto all_ops_info_vector_addr_ul = PyObject_CallMethod(op_info_loader_ins, "get_all_ops_info", nullptr);
  89. if (all_ops_info_vector_addr_ul == nullptr) {
  90. MS_LOG(EXCEPTION) << "Failed to call get_all_ops_addr.";
  91. return;
  92. }
  93. auto all_ops_info_vector_addr = PyLong_AsVoidPtr(all_ops_info_vector_addr_ul);
  94. auto all_ops_info = static_cast<std::vector<kernel::OpInfo *> *>(all_ops_info_vector_addr);
  95. for (auto op_info : *all_ops_info) {
  96. kernel::OpLib::RegOpInfo(std::shared_ptr<kernel::OpInfo>(op_info));
  97. }
  98. all_ops_info->clear();
  99. delete all_ops_info;
  100. Py_DECREF(op_info_loader);
  101. Py_DECREF(op_info_loader_class);
  102. Py_DECREF(c_expression_dict);
  103. Py_DECREF(c_expression);
  104. return;
  105. }
/**
 * Compile a func graph through the underlying session implementation and
 * return the backend-assigned graph id for later RunGraph calls.
 *
 * @param funcGraphPtr the graph to compile; wrapped in NOT_NULL before use.
 * @return the graph id produced by the session implementation.
 */
uint32_t Session::CompileGraph(std::shared_ptr<FuncGraph> funcGraphPtr) {
  MS_ASSERT(session_impl_ != nullptr);
  auto graph_id = session_impl_->CompileGraph(NOT_NULL(funcGraphPtr));
  // NOTE(review): the GIL is released only AFTER compilation completes, and
  // the scoped release ends immediately at return — presumably this drops a
  // GIL acquired during compilation before returning to the caller; confirm
  // the intended ordering against the pybind11 GIL usage elsewhere.
  py::gil_scoped_release gil_release;
  return graph_id;
}
  112. MultiTensor Session::RunGraph(uint32_t graph_id, const std::vector<std::shared_ptr<inference::MSTensor>> &inputs) {
  113. std::vector<tensor::TensorPtr> inTensors;
  114. inTensors.resize(inputs.size());
  115. bool has_error = false;
  116. std::transform(inputs.begin(), inputs.end(), inTensors.begin(),
  117. [&has_error](const std::shared_ptr<inference::MSTensor> &tensor_ptr) -> tensor::TensorPtr {
  118. if (tensor_ptr == nullptr) {
  119. MS_LOG(WARNING) << "input MSTensor is nullptr, return nullptr";
  120. has_error = true;
  121. return nullptr;
  122. }
  123. auto tensor = static_cast<inference::Tensor *>(tensor_ptr.get());
  124. if (tensor == nullptr) {
  125. MS_LOG(ERROR) << "Can not cast input MSTensor to tensor";
  126. has_error = true;
  127. return nullptr;
  128. }
  129. return tensor->tensor();
  130. });
  131. if (has_error) {
  132. MS_LOG(ERROR) << "Init Tensor failed, returning empty result";
  133. std::vector<std::shared_ptr<inference::MSTensor>> multiTensor;
  134. return multiTensor;
  135. }
  136. VectorRef outputs;
  137. session_impl_->RunGraph(graph_id, inTensors, &outputs);
  138. return TransformVectorRefToMultiTensor(outputs);
  139. }
  140. int Session::Init(const std::string &device, uint32_t device_id) {
  141. RegAllOp();
  142. auto ms_context = MsContext::GetInstance();
  143. ms_context->set_execution_mode(kGraphMode);
  144. ms_context->set_device_target(kAscendDevice);
  145. session_impl_ = session::SessionFactory::Get().Create(device);
  146. if (session_impl_ == nullptr) {
  147. MS_LOG(ERROR) << "Session create failed!, please make sure target device:" << device << " is available.";
  148. return -1;
  149. }
  150. session_impl_->Init(device_id);
  151. if (ms_context == nullptr) {
  152. MS_LOG(ERROR) << "Get Context failed!";
  153. return -1;
  154. }
  155. if (!ms_context->OpenTsd()) {
  156. MS_LOG(ERROR) << "Session init OpenTsd failed!";
  157. return -1;
  158. }
  159. return 0;
  160. }
// Default constructor: members (e.g. session_impl_) are set up lazily in Init().
Session::Session() = default;
  162. } // namespace mindspore::inference