
acl_session.cc

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <memory>
#include <algorithm>
#include <fstream>
#include "serving/acl/acl_session.h"
#include "include/infer_log.h"

namespace mindspore::inference {

std::shared_ptr<InferSession> InferSession::CreateSession(const std::string &device, uint32_t device_id) {
  try {
    auto session = std::make_shared<AclSession>();
    auto ret = session->InitEnv(device, device_id);
    if (ret != SUCCESS) {
      return nullptr;
    }
    return session;
  } catch (std::exception &e) {
    MSI_LOG_ERROR << "Inference CreateSession failed";
    return nullptr;
  }
}

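// LoadModelFromFile loads the model through model_process_ and then probes for an optional DVPP
// preprocessing config placed next to the model file, named "<model file without extension>_dvpp_config.json".
// Only when that file exists and parses successfully does the session accept images as inputs.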
Status AclSession::LoadModelFromFile(const std::string &file_name, uint32_t &model_id) {
  Status ret = model_process_.LoadModelFromFile(file_name, model_id);
  if (ret != SUCCESS) {
    MSI_LOG_ERROR << "Load model from file failed, model file " << file_name;
    return FAILED;
  }
  std::string dvpp_config_file;
  auto index = file_name.rfind(".");
  if (index == std::string::npos) {
    dvpp_config_file = file_name;
  } else {
    dvpp_config_file = file_name.substr(0, index);
  }
  dvpp_config_file += "_dvpp_config.json";
  std::ifstream fp(dvpp_config_file);
  if (!fp.is_open()) {
    MSI_LOG_INFO << "Dvpp config file does not exist, model will execute with tensors as inputs, dvpp config file "
                 << dvpp_config_file;
    return SUCCESS;
  }
  fp.close();
  if (dvpp_process_.InitWithJsonConfig(dvpp_config_file) != SUCCESS) {
    MSI_LOG_ERROR << "Dvpp config file parse error, dvpp config file " << dvpp_config_file;
    return FAILED;
  }
  execute_with_dvpp_ = true;
  MSI_LOG_INFO << "Dvpp config success";
  return SUCCESS;
}

Status AclSession::UnloadModel(uint32_t /*model_id*/) {
  model_process_.UnLoad();
  return SUCCESS;
}

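// Execution entry for plain tensor requests. aclrtSetCurrentContext binds the context created in
// InitEnv to the calling thread, so it is re-applied here before each execution in case the request
// is served on a different thread than the one that initialized the environment.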
Status AclSession::ExecuteModel(uint32_t /*model_id*/, const RequestBase &request, ReplyBase &reply) {
  // set device context
  aclError rt_ret = aclrtSetCurrentContext(context_);
  if (rt_ret != ACL_ERROR_NONE) {
    MSI_LOG_ERROR << "set the ascend device context failed";
    return FAILED;
  }
  return model_process_.Execute(request, reply);
}

Status AclSession::PreProcess(uint32_t /*model_id*/, const InferImagesBase *images_input,
                              ImagesDvppOutput &dvpp_output) {
  if (images_input == nullptr) {
    MSI_LOG_ERROR << "images input is nullptr";
    return FAILED;
  }
  auto batch_size = images_input->batch_size();
  if (batch_size <= 0) {
    MSI_LOG_ERROR << "invalid batch size " << images_input->batch_size();
    return FAILED;
  }
  std::vector<const void *> pic_buffer_list;
  std::vector<size_t> pic_size_list;
  for (size_t i = 0; i < batch_size; i++) {
    const void *pic_buffer = nullptr;
    uint32_t pic_size = 0;
    if (!images_input->get(i, pic_buffer, pic_size) || pic_buffer == nullptr || pic_size == 0) {
      MSI_LOG_ERROR << "Get request " << i << "th buffer failed";
      return FAILED;
    }
    pic_buffer_list.push_back(pic_buffer);
    pic_size_list.push_back(pic_size);
  }
  auto ret = dvpp_process_.Process(pic_buffer_list, pic_size_list, dvpp_output.buffer_device, dvpp_output.buffer_size);
  if (ret != SUCCESS) {
    MSI_LOG_ERROR << "dvpp process failed";
    return ret;
  }
  return SUCCESS;
}

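// Execution entry for image requests: validates that DVPP is configured, that there is exactly one
// images input whose batch size matches the model, and that no extra tensor inputs are supplied,
// then runs DVPP preprocessing followed by model execution on the resulting device buffer.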
Status AclSession::ExecuteModel(uint32_t model_id, const ImagesRequestBase &images_inputs,  // images for preprocess
                                const RequestBase &request, ReplyBase &reply) {
  if (!execute_with_dvpp_) {
    MSI_LOG_ERROR << "Unexpected images as inputs, DVPP not configured";
    return INFER_STATUS(INVALID_INPUTS) << "Unexpected images as inputs, DVPP not configured";
  }
  aclError rt_ret = aclrtSetCurrentContext(context_);
  if (rt_ret != ACL_ERROR_NONE) {
    MSI_LOG_ERROR << "set the ascend device context failed";
    return FAILED;
  }
  if (images_inputs.size() != 1) {
    MSI_LOG_ERROR << "Only support one input to do DVPP preprocess";
    return INFER_STATUS(INVALID_INPUTS) << "Only support one input to do DVPP preprocess";
  }
  if (images_inputs[0] == nullptr) {
    MSI_LOG_ERROR << "Get first images input failed";
    return FAILED;
  }
  if (images_inputs[0]->batch_size() != model_process_.GetBatchSize()) {
    MSI_LOG_ERROR << "Input batch size " << images_inputs[0]->batch_size() << " does not match model batch size "
                  << model_process_.GetBatchSize();
    return INFER_STATUS(INVALID_INPUTS) << "Input batch size " << images_inputs[0]->batch_size()
                                        << " does not match model batch size " << model_process_.GetBatchSize();
  }
  if (request.size() != 0) {
    MSI_LOG_ERROR << "Only support one images input and no extra tensor inputs, tensor inputs size is "
                  << request.size();
    return INFER_STATUS(INVALID_INPUTS) << "Only support one images input and no extra tensor inputs, tensor "
                                        << "inputs size is " << request.size();
  }
  ImagesDvppOutput dvpp_output;
  Status ret = PreProcess(model_id, images_inputs[0], dvpp_output);
  if (ret != SUCCESS) {
    MSI_LOG_ERROR << "DVPP preprocess failed";
    return ret;
  }
  ret = model_process_.Execute(dvpp_output.buffer_device, dvpp_output.buffer_size, reply);
  if (ret != SUCCESS) {
    MSI_LOG_ERROR << "Execute model failed";
    return ret;
  }
  return SUCCESS;
}

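// ACL environment setup, in order: aclInit -> aclrtSetDevice -> aclrtCreateContext ->
// aclrtCreateStream -> aclrtGetRunMode (to decide whether input/output buffers already live on the
// device) -> DVPP resource initialization on the created stream.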
Status AclSession::InitEnv(const std::string &device_type, uint32_t device_id) {
  device_type_ = device_type;
  device_id_ = device_id;
  auto ret = aclInit(nullptr);
  if (ret != ACL_ERROR_NONE) {
    MSI_LOG_ERROR << "Execute aclInit failed";
    return FAILED;
  }
  MSI_LOG_INFO << "acl init success";
  ret = aclrtSetDevice(device_id_);
  if (ret != ACL_ERROR_NONE) {
    MSI_LOG_ERROR << "acl open device " << device_id_ << " failed";
    return FAILED;
  }
  MSI_LOG_INFO << "open device " << device_id_ << " success";
  ret = aclrtCreateContext(&context_, device_id_);
  if (ret != ACL_ERROR_NONE) {
    MSI_LOG_ERROR << "acl create context failed";
    return FAILED;
  }
  MSI_LOG_INFO << "create context success";
  ret = aclrtCreateStream(&stream_);
  if (ret != ACL_ERROR_NONE) {
    MSI_LOG_ERROR << "acl create stream failed";
    return FAILED;
  }
  MSI_LOG_INFO << "create stream success";
  aclrtRunMode run_mode;
  ret = aclrtGetRunMode(&run_mode);
  if (ret != ACL_ERROR_NONE) {
    MSI_LOG_ERROR << "acl get run mode failed";
    return FAILED;
  }
  bool is_device = (run_mode == ACL_DEVICE);
  model_process_.SetIsDevice(is_device);
  MSI_LOG_INFO << "get run mode success, is device input/output " << is_device;
  if (dvpp_process_.InitResource(stream_) != SUCCESS) {
    MSI_LOG_ERROR << "dvpp init resource failed";
    return FAILED;
  }
  MSI_LOG_INFO << "Init acl success, device id " << device_id_;
  return SUCCESS;
}

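// Teardown mirrors InitEnv in reverse: DVPP resources, stream, context, device, then aclFinalize.
// Failures are logged but not propagated, so finalization always reports SUCCESS.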
Status AclSession::FinalizeEnv() {
  dvpp_process_.Finalize();
  aclError ret;
  if (stream_ != nullptr) {
    ret = aclrtDestroyStream(stream_);
    if (ret != ACL_ERROR_NONE) {
      MSI_LOG_ERROR << "destroy stream failed";
    }
    stream_ = nullptr;
  }
  MSI_LOG_INFO << "end to destroy stream";
  if (context_ != nullptr) {
    ret = aclrtDestroyContext(context_);
    if (ret != ACL_ERROR_NONE) {
      MSI_LOG_ERROR << "destroy context failed";
    }
    context_ = nullptr;
  }
  MSI_LOG_INFO << "end to destroy context";
  ret = aclrtResetDevice(device_id_);
  if (ret != ACL_ERROR_NONE) {
    MSI_LOG_ERROR << "reset device " << device_id_ << " failed";
  }
  MSI_LOG_INFO << "end to reset device " << device_id_;
  ret = aclFinalize();
  if (ret != ACL_ERROR_NONE) {
    MSI_LOG_ERROR << "finalize acl failed";
  }
  MSI_LOG_INFO << "end to finalize acl";
  return SUCCESS;
}

AclSession::AclSession() = default;
}  // namespace mindspore::inference
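
// A minimal usage sketch (not part of this translation unit). The device string "Ascend", the model
// file name, and the concrete request/reply objects are assumptions; they come from the serving
// layer that drives this session:
//
//   uint32_t model_id = 0;
//   auto session = mindspore::inference::InferSession::CreateSession("Ascend", 0);
//   if (session == nullptr || session->LoadModelFromFile("model.om", model_id) != SUCCESS) {
//     // handle load failure
//   }
//   // build request/reply objects, then:
//   // session->ExecuteModel(model_id, request, reply);
//   session->UnloadModel(model_id);
//   session->FinalizeEnv();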