
session.cc

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "core/session.h"

#include <grpcpp/grpcpp.h>
#include <string>
#include <map>
#include <vector>
#include <utility>
#include <memory>
#include <chrono>
#include <mutex>  // std::mutex / std::lock_guard are used below but were missing from the include list

#include "include/infer_log.h"
#include "serving/ms_service.grpc.pb.h"
#include "core/util/option_parser.h"
#include "core/version_control/version_controller.h"
#include "core/util/file_system_operation.h"
#include "core/serving_tensor.h"
using ms_serving::MSService;
using ms_serving::PredictReply;
using ms_serving::PredictRequest;

namespace mindspore {
namespace serving {
// Creates the backend inference session for the given device target and device id.
// (The spelling "CreatDeviceSession" matches the declaration in core/session.h.)
Status Session::CreatDeviceSession(const std::string &device, uint32_t device_id) {
  session_ = inference::InferSession::CreateSession(device, device_id);
  if (session_ == nullptr) {
    MSI_LOG(ERROR) << "Create session failed";
    return FAILED;
  }
  device_type_ = device;
  return SUCCESS;
}
// Process-wide singleton (function-local static, thread-safe since C++11).
Session &Session::Instance() {
  static Session instance;
  return instance;
}
Status Session::Predict(const PredictRequest &request, PredictReply &reply) {
  // Take the lock before reading model_loaded_ and session_, which Warmup and
  // Clear modify under the same mutex; the original read them before locking.
  std::lock_guard<std::mutex> lock(mutex_);
  if (!model_loaded_) {
    MSI_LOG(ERROR) << "the model has not been loaded";
    return FAILED;
  }
  if (session_ == nullptr) {
    MSI_LOG(ERROR) << "the inference session has not been initialized";
    return FAILED;
  }
  MSI_LOG(INFO) << "run Predict";
  // A request carries either images or raw data; dispatch accordingly.
  if (request.images_size() > 0) {
    ServingImagesRequest serving_images(request);
    ServingRequest serving_request(request);
    ServingReply serving_reply(reply);
    Status ret = session_->ExecuteModel(graph_id_, serving_images, serving_request, serving_reply);
    if (ret != SUCCESS) {
      MSI_LOG(ERROR) << "execute model with images failed";
      return ret;
    }
  } else if (request.data_size() > 0) {
    ServingRequest serving_request(request);
    ServingReply serving_reply(reply);
    Status ret = session_->ExecuteModel(graph_id_, serving_request, serving_reply);
    if (ret != SUCCESS) {
      MSI_LOG(ERROR) << "execute model with data failed";
      return ret;
    }
  }
  MSI_LOG(INFO) << "run Predict finished";
  return SUCCESS;
}
// Loads the model graph from disk; Predict is rejected until this succeeds.
Status Session::Warmup(const MindSporeModelPtr model) {
  if (session_ == nullptr) {
    MSI_LOG(ERROR) << "CreatDeviceSession must be called before Warmup";
    return FAILED;
  }
  std::lock_guard<std::mutex> lock(mutex_);
  std::string file_name = model->GetModelPath() + '/' + model->GetModelName();
  model_loaded_ = false;
  MSI_TIME_STAMP_START(LoadModelFromFile)
  auto ret = session_->LoadModelFromFile(file_name, graph_id_);
  MSI_TIME_STAMP_END(LoadModelFromFile)
  if (ret != SUCCESS) {
    MSI_LOG(ERROR) << "Load graph model failed, file name is " << file_name.c_str();
    return ret;
  }
  model_loaded_ = true;
  MSI_LOG(INFO) << "Session Warmup finished";
  return SUCCESS;
}
// Unloads the graph and finalizes the device environment.
Status Session::Clear() {
  std::lock_guard<std::mutex> lock(mutex_);  // guard session_ against concurrent Predict/Warmup
  if (session_ != nullptr) {
    session_->UnloadModel(graph_id_);
    session_->FinalizeEnv();
    session_ = nullptr;
  }
  return SUCCESS;
}
Status Session::GetModelInputsInfo(std::vector<inference::InferTensor> &tensor_list) {
  // Same ordering as Predict: lock before reading the shared state.
  std::lock_guard<std::mutex> lock(mutex_);
  if (!model_loaded_) {
    MSI_LOG(ERROR) << "the model has not been loaded";
    return FAILED;
  }
  if (session_ == nullptr) {
    MSI_LOG(ERROR) << "the inference session has not been initialized";
    return FAILED;
  }
  Status ret = session_->GetModelInputsInfo(graph_id_, &tensor_list);
  if (ret != SUCCESS) {
    MSI_LOG(ERROR) << "get model inputs info failed";
  }
  return ret;
}
}  // namespace serving
}  // namespace mindspore
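
For orientation, the sketch below shows how a caller might drive this session lifecycle end to end. It is an illustration, not code from the repository: ServeOnce is a hypothetical function, the "Ascend" target and device id 0 are example values, and the PredictRequest is left empty because in the real server requests arrive through the MSService gRPC endpoint and models are supplied by the version controller.

// usage_sketch.cc -- hypothetical example, not part of the repository.
#include "core/session.h"
#include "serving/ms_service.grpc.pb.h"

namespace mindspore {
namespace serving {

Status ServeOnce(const MindSporeModelPtr &model) {
  auto &session = Session::Instance();

  // 1. Create the backend inference session, e.g. on device 0 of an Ascend target.
  if (session.CreatDeviceSession("Ascend", 0) != SUCCESS) {
    return FAILED;
  }

  // 2. Warm up (load) the model graph; Predict refuses to run until this succeeds.
  if (session.Warmup(model) != SUCCESS) {
    return FAILED;
  }

  // 3. Serve a prediction. Predict serializes concurrent callers on an internal mutex.
  ms_serving::PredictRequest request;  // left unpopulated in this sketch
  ms_serving::PredictReply reply;
  Status status = session.Predict(request, reply);

  // 4. Unload the graph and finalize the device environment on shutdown.
  session.Clear();
  return status;
}

}  // namespace serving
}  // namespace mindspore

Placing the sketch inside the mindspore::serving namespace lets Status, SUCCESS, FAILED, and MindSporeModelPtr resolve exactly as they do in session.cc itself.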