You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

trt_loader.cc 2.4 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667
  1. /**
  2. * Copyright 2021 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "runtime/device/gpu/trt_loader.h"
  17. #include <dlfcn.h>
  18. #include <memory>
  19. #include <NvInferRuntimeCommon.h>
  20. #include "backend/kernel_compiler/gpu/trt/trt_utils.h"
  21. namespace mindspore {
  22. namespace device {
  23. namespace gpu {
  24. TrtLoader::TrtLoader()
  25. : nvinfer_loaded_(false), nvinfer_handle_(nullptr), create_infer_builder_(nullptr), create_infer_runtime_(nullptr) {
  26. nvinfer_handle_ = dlopen("libnvinfer.so", RTLD_NOW | RTLD_LOCAL);
  27. if (nvinfer_handle_ == nullptr) {
  28. MS_LOG(WARNING) << "Can not open libnvinfer.so. " << dlerror()
  29. << ". Install Tensor-RT and export LD_LIBRARY_PATH=${TENSORRT_HOME}/lib:$LD_LIBRARY_PATH.";
  30. MS_LOG(WARNING) << "Inference with native backend.";
  31. return;
  32. }
  33. create_infer_builder_ = (CreateInferBuilder_t)dlsym(nvinfer_handle_, "createInferBuilder_INTERNAL");
  34. if (create_infer_builder_ == nullptr) {
  35. MS_LOG(WARNING) << "Failed to get createInferBuilder_INTERNAL symbol. " << dlerror();
  36. return;
  37. }
  38. create_infer_runtime_ = (CreateInferRuntime_t)dlsym(nvinfer_handle_, "createInferRuntime_INTERNAL");
  39. if (create_infer_runtime_ == nullptr) {
  40. MS_LOG(WARNING) << "Failed to get createInferRuntime_INTERNAL symbol. " << dlerror();
  41. return;
  42. }
  43. nvinfer_loaded_ = true;
  44. }
  45. TrtLoader::~TrtLoader() {
  46. if (nvinfer_handle_ != nullptr) {
  47. dlclose(nvinfer_handle_);
  48. }
  49. }
  50. std::shared_ptr<nvinfer1::IBuilder> TrtLoader::CreateInferBuilder(nvinfer1::ILogger *logger) {
  51. return TrtPtr<nvinfer1::IBuilder>(create_infer_builder_(*logger, NV_TENSORRT_VERSION));
  52. }
  53. std::shared_ptr<nvinfer1::IRuntime> TrtLoader::CreateInferRuntime(nvinfer1::ILogger *logger) {
  54. return TrtPtr<nvinfer1::IRuntime>(create_infer_runtime_(*logger, NV_TENSORRT_VERSION));
  55. }
  56. } // namespace gpu
  57. } // namespace device
  58. } // namespace mindspore