diff --git a/mindspore_serving/ccsrc/worker/inference/mindspore_model_wrap.cc b/mindspore_serving/ccsrc/worker/inference/mindspore_model_wrap.cc
index 9474f39..c79fa25 100644
--- a/mindspore_serving/ccsrc/worker/inference/mindspore_model_wrap.cc
+++ b/mindspore_serving/ccsrc/worker/inference/mindspore_model_wrap.cc
@@ -99,7 +99,8 @@ Status MindSporeModelWrap::LoadModelFromFile(serving::DeviceType device_type, ui
   } catch (std::runtime_error &ex) {
     return INFER_STATUS_LOG_ERROR(FAILED) << "Load model from file failed, model file: " << file_name
                                           << ", device_type: '" << device_type
-                                          << "', device_id: " << device_id << ", model type: " << model_type << ", options: " << other_options;
+                                          << "', device_id: " << device_id << ", model type: " << model_type << ", options: " << other_options
+                                          << ", build error detail: " << ex.what();
   }

   ApiModelInfo api_model_info;
diff --git a/mindspore_serving/master/__init__.py b/mindspore_serving/master/__init__.py
index 8d4e8f2..15ca801 100644
--- a/mindspore_serving/master/__init__.py
+++ b/mindspore_serving/master/__init__.py
@@ -22,6 +22,5 @@ __all__.extend([
     "start_grpc_server",
     'start_restful_server',
     'start_master_server',
-    'stop',
-    'context'
+    'stop'
 ])
diff --git a/mindspore_serving/master/context.py b/mindspore_serving/master/context.py
index 2d32d87..c84b7c9 100644
--- a/mindspore_serving/master/context.py
+++ b/mindspore_serving/master/context.py
@@ -16,6 +16,8 @@
 from mindspore_serving._mindspore_serving import MasterContext_
 from mindspore_serving.common import check_type

+__all__ = ["set_max_request_buffer_count"]
+
 _context = MasterContext_.get_instance()