From a6afe3cceba4f54962e8b849ef9ae752cc6c8a3e Mon Sep 17 00:00:00 2001 From: xuyongfei Date: Wed, 9 Dec 2020 13:52:23 +0800 Subject: [PATCH] serving: fix 910 device type --- .../ccsrc/python/master/master_py.cc | 5 ++++- mindspore_serving/ccsrc/python/serving_py.cc | 4 ++-- .../ccsrc/python/worker/worker_py.cc | 5 ++++- .../ccsrc/worker/inference/inference.h | 17 ++++++++++++++++- .../worker/inference/mindspore_model_wrap.cc | 5 +++-- 5 files changed, 29 insertions(+), 7 deletions(-) diff --git a/mindspore_serving/ccsrc/python/master/master_py.cc b/mindspore_serving/ccsrc/python/master/master_py.cc index 11fcd2f..1cb26d0 100644 --- a/mindspore_serving/ccsrc/python/master/master_py.cc +++ b/mindspore_serving/ccsrc/python/master/master_py.cc @@ -42,7 +42,10 @@ void PyMaster::StartRestfulServer(const std::string &ip, uint32_t grpc_port, int } void PyMaster::WaitAndClear() { - ExitHandle::Instance().MasterWait(); + { + py::gil_scoped_release release; + ExitHandle::Instance().MasterWait(); + } Server::Instance().Clear(); MSI_LOG_INFO << "Python server end wait and clear"; } diff --git a/mindspore_serving/ccsrc/python/serving_py.cc b/mindspore_serving/ccsrc/python/serving_py.cc index 008f385..097c253 100644 --- a/mindspore_serving/ccsrc/python/serving_py.cc +++ b/mindspore_serving/ccsrc/python/serving_py.cc @@ -108,7 +108,7 @@ PYBIND11_MODULE(_mindspore_serving, m) { .def_static("start_servable", &PyWorker::StartServable) .def_static("start_servable_in_master", &PyWorker::StartServableInMaster) .def_static("get_batch_size", &PyWorker::GetBatchSize) - .def_static("wait_and_clear", &PyWorker::WaitAndClear, py::call_guard<py::gil_scoped_release>()) + .def_static("wait_and_clear", &PyWorker::WaitAndClear) .def_static("stop", PyWorker::Stop) .def_static("get_py_task", &PyWorker::GetPyTask, py::call_guard<py::gil_scoped_release>()) .def_static("try_get_preprocess_py_task", &PyWorker::TryGetPreprocessPyTask) @@ -134,7 +134,7 @@ PYBIND11_MODULE(_mindspore_serving, m) { .def_static("start_grpc_server", 
&PyMaster::StartGrpcServer) .def_static("start_grpc_master_server", &PyMaster::StartGrpcMasterServer) .def_static("start_restful_server", &PyMaster::StartRestfulServer) - .def_static("wait_and_clear", &PyMaster::WaitAndClear, py::call_guard<py::gil_scoped_release>()) + .def_static("wait_and_clear", &PyMaster::WaitAndClear) .def_static("stop", &PyMaster::Stop); (void)py::module::import("atexit").attr("register")(py::cpp_function{[&]() -> void { diff --git a/mindspore_serving/ccsrc/python/worker/worker_py.cc b/mindspore_serving/ccsrc/python/worker/worker_py.cc index 25c2f8d..0d7085c 100644 --- a/mindspore_serving/ccsrc/python/worker/worker_py.cc +++ b/mindspore_serving/ccsrc/python/worker/worker_py.cc @@ -118,7 +118,10 @@ void PyWorker::PushPostprocessPyFailed(int count) { } void PyWorker::WaitAndClear() { - ExitHandle::Instance().WorkerWait(); + { + py::gil_scoped_release release; + ExitHandle::Instance().WorkerWait(); + } Worker::GetInstance().Clear(); } diff --git a/mindspore_serving/ccsrc/worker/inference/inference.h b/mindspore_serving/ccsrc/worker/inference/inference.h index e9b4498..4e816b3 100644 --- a/mindspore_serving/ccsrc/worker/inference/inference.h +++ b/mindspore_serving/ccsrc/worker/inference/inference.h @@ -198,7 +198,22 @@ static inline LogStream &operator<<(LogStream &stream, DeviceType device_type) { stream << "kDeviceTypeNotSpecified"; break; default: - stream << "[device type " << static_cast<int>(device_type) << "]"; + stream << "[device type: " << static_cast<int>(device_type) << "]"; break; } return stream; } + +static inline LogStream &operator<<(LogStream &stream, api::ModelType model_type) { + switch (model_type) { + case api::kMindIR: + stream << "kMindIR"; + break; + case api::kOM: + stream << "kOM"; + break; + default: + stream << "[model type: " << static_cast<int>(model_type) << "]"; break; } return stream; diff --git a/mindspore_serving/ccsrc/worker/inference/mindspore_model_wrap.cc b/mindspore_serving/ccsrc/worker/inference/mindspore_model_wrap.cc index 
2072c25..39ce7b5 100644 --- a/mindspore_serving/ccsrc/worker/inference/mindspore_model_wrap.cc +++ b/mindspore_serving/ccsrc/worker/inference/mindspore_model_wrap.cc @@ -83,11 +83,12 @@ Status MindSporeModelWrap::LoadModelFromFile(serving::DeviceType device_type, ui std::shared_ptr<api::Model> model = nullptr; try { - api::Context::Instance().SetDeviceTarget(api::kDeviceTypeAscend310).SetDeviceID(device_id); + api::Context::Instance().SetDeviceTarget(device_type_str).SetDeviceID(device_id); auto graph = api::Serialization::LoadModel(file_name, model_type); model = std::make_shared<api::Model>(api::GraphCell(graph)); } catch (std::runtime_error &ex) { - MSI_LOG_ERROR << "Load model from file failed, device_type " << device_type_str << ", device_id " << device_id; + MSI_LOG_ERROR << "Load model from file failed, device_type: '" << device_type_str << "', device_id: " << device_id + << ", model type: " << model_type; return FAILED; } api::Status status = model->Build({});