You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

stub_preprocess.cc 2.4 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "worker/preprocess.h"
  17. #include "mindspore_serving/ccsrc/common/tensor.h"
  18. namespace mindspore::serving {
  19. class StubCastInt32toFp32Preprocess : public PreprocessBase {
  20. public:
  21. Status Preprocess(const std::string &postprocess_name, const InstanceData &input, InstanceData *output) override {
  22. MSI_EXCEPTION_IF_NULL(output);
  23. auto x1 = input[0];
  24. auto x2 = input[1];
  25. if (x1->data_type() != kMSI_Int32 || x2->data_type() != kMSI_Int32) {
  26. return INFER_STATUS_LOG_ERROR(FAILED)
  27. << "Preprocess failed: Input data type invalid " << x1->data_type() << ", " << x2->data_type();
  28. }
  29. auto y1 = std::make_shared<Tensor>();
  30. y1->set_data_type(serving::kMSI_Float32);
  31. y1->resize_data(x1->data_size());
  32. y1->set_shape(x1->shape());
  33. output->push_back(y1);
  34. auto y2 = std::make_shared<Tensor>();
  35. y2->set_data_type(serving::kMSI_Float32);
  36. y2->resize_data(x2->data_size());
  37. y2->set_shape(x2->shape());
  38. output->push_back(y2);
  39. auto x1_data = reinterpret_cast<const int32_t *>(x1->data());
  40. auto y1_data = reinterpret_cast<float *>(y1->mutable_data());
  41. for (size_t i = 0; i < y1->data_size() / 4; i++) {
  42. y1_data[i] = static_cast<float>(x1_data[i]);
  43. }
  44. auto x2_data = reinterpret_cast<const int32_t *>(x2->data());
  45. auto y2_data = reinterpret_cast<float *>(y2->mutable_data());
  46. for (size_t i = 0; i < y2->data_size() / 4; i++) {
  47. y2_data[i] = static_cast<float>(x2_data[i]);
  48. }
  49. return SUCCESS;
  50. }
  51. size_t GetInputsCount(const std::string &postprocess_name) const override { return 2; }
  52. size_t GetOutputsCount(const std::string &postprocess_name) const override { return 2; }
  53. };
  54. REGISTER_PREPROCESS(StubCastInt32toFp32Preprocess, "stub_preprocess_cast_int32_to_fp32_cpp")
  55. } // namespace mindspore::serving

A lightweight, high-performance service module that helps MindSpore developers efficiently deploy online inference services in production environments.