You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

stub_inference.cc 2.2 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
#include <initializer_list>
#include <memory>

#include "worker/inference/inference.h"
#include "worker/inference/mindspore_model_wrap.h"
  19. namespace mindspore::serving {
  20. InferenceLoader::InferenceLoader() {}
  21. InferenceLoader::~InferenceLoader() {}
  22. InferenceLoader &InferenceLoader::Instance() {
  23. static InferenceLoader inference;
  24. return inference;
  25. }
  26. std::shared_ptr<InferenceBase> InferenceLoader::CreateMindSporeInfer() {
  27. return std::make_shared<MindSporeModelWrap>();
  28. }
  29. Status InferenceLoader::LoadMindSporeModelWrap() { return SUCCESS; }
  30. DeviceType InferenceLoader::GetSupportDeviceType(DeviceType device_type, ModelType model_type) {
  31. auto mindspore_infer = CreateMindSporeInfer();
  32. if (mindspore_infer == nullptr) {
  33. MSI_LOG_ERROR << "Create MindSpore infer failed";
  34. return kDeviceTypeNotSpecified;
  35. }
  36. if (model_type == kUnknownType) {
  37. model_type = kMindIR;
  38. }
  39. if (device_type == kDeviceTypeNotSpecified) {
  40. auto ascend_list = {kDeviceTypeAscendCL, kDeviceTypeAscendMS, kDeviceTypeGpu};
  41. for (auto item : ascend_list) {
  42. if (mindspore_infer->CheckModelSupport(item, model_type)) {
  43. return item;
  44. }
  45. }
  46. } else if (device_type == kDeviceTypeAscend) {
  47. auto ascend_list = {kDeviceTypeAscendCL, kDeviceTypeAscendMS};
  48. for (auto item : ascend_list) {
  49. if (mindspore_infer->CheckModelSupport(item, model_type)) {
  50. return item;
  51. }
  52. }
  53. } else {
  54. if (mindspore_infer->CheckModelSupport(device_type, model_type)) {
  55. return device_type;
  56. }
  57. }
  58. return kDeviceTypeNotSpecified;
  59. }
  60. } // namespace mindspore::serving

A lightweight, high-performance service module that helps MindSpore developers efficiently deploy online inference services in production environments.