You can not select more than 25 topics Topics must start with a chinese character,a letter or number, can include dashes ('-') and can be up to 35 characters long.

parse_restful.cc 45 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "common/common_test.h"
  17. #include "master/server.h"
  18. #include "common/tensor_base.h"
  19. #define private public
  20. #include "master/restful/http_process.h"
  21. #undef private
  22. using std::string;
  23. using std::vector;
  24. namespace mindspore {
  25. namespace serving {
// Fixture for RESTful request-parsing tests: each case feeds a JSON request
// into RestfulService::ParseRequest and checks the resulting status/tensors.
class TestParseInput : public UT::Common {
 public:
  TestParseInput() = default;
};
// Fixture for RESTful reply-construction tests (building proto::PredictReply
// tensors and serializing them back to JSON).
class TestParseReply : public UT::Common {
 public:
  TestParseReply() = default;
};
  34. TEST_F(TestParseInput, test_parse_SUCCESS) {
  35. nlohmann::json js = R"(
  36. {"instances":[
  37. {
  38. "key_tag":"scalar",
  39. "key_int": 1,
  40. "key_bool": false,
  41. "key_float": 2.3,
  42. "key_str": "ut_test",
  43. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes"}
  44. },
  45. {
  46. "key_tag":"tensor",
  47. "key_int": [1,2,3],
  48. "key_bool":[[true, false], [false, true]],
  49. "key_float":[[1.1, 2.2]],
  50. "key_str":["ut_test"],
  51. "key_bytes":{"b64":"dXRfdGVzdA=="}
  52. },
  53. {
  54. "key_tag":"b64",
  55. "key_str_format1":"ut_test",
  56. "key_str_foramt2":{"b64":"dXRfdGVzdA==", "type":"str"},
  57. "key_bytes_int16":{"b64":"AQACAAIAAwADAAQA", "type":"int16", "shape":[3,2]},
  58. "key_bytes_fp16":{"b64":"ZjxmQJpCZkQ=", "type":"fp16", "shape":[2,2]},
  59. "key_bytes_bool":{"b64":"AQA=", "type":"bool", "shape":[1,2]}
  60. }
  61. ]
  62. }
  63. )"_json;
  64. struct evhttp_request *request = new evhttp_request();
  65. int size = 100;
  66. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  67. request_msg->request_message_ = js;
  68. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  69. proto::PredictRequest predict_request;
  70. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  71. RestfulService restful_service(dispatcher_);
  72. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  73. ASSERT_EQ(status.StatusCode(), SUCCESS);
  74. ASSERT_EQ(predict_request.instances().size(), 3);
  75. for (int32_t i = 0; i < predict_request.instances().size(); i++) {
  76. auto &cur_instance = predict_request.instances(i);
  77. auto &items = cur_instance.items();
  78. if (i == 0) {
  79. ASSERT_EQ(items.size(), 6);
  80. for (const auto &item : items) {
  81. ProtoTensor pb_tensor(const_cast<proto::Tensor *>(&item.second));
  82. if (item.first == "key_int") {
  83. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Int32);
  84. const int32_t *data = reinterpret_cast<const int32_t *>(pb_tensor.data());
  85. ASSERT_EQ(*data, 1);
  86. } else if (item.first == "key_bool") {
  87. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Bool);
  88. const bool *data = reinterpret_cast<const bool *>(pb_tensor.data());
  89. ASSERT_EQ(*data, false);
  90. } else if (item.first == "key_float") {
  91. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Float32);
  92. const float *data = reinterpret_cast<const float *>(pb_tensor.data());
  93. ASSERT_FLOAT_EQ(*data, 2.3);
  94. } else if (item.first == "key_str") {
  95. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_String);
  96. auto str_nums = pb_tensor.bytes_data_size();
  97. ASSERT_EQ(str_nums, 1);
  98. std::string value;
  99. size_t length;
  100. const uint8_t *ptr = nullptr;
  101. pb_tensor.get_bytes_data(0, &ptr, &length);
  102. value.resize(length);
  103. memcpy_s(value.data(), length, reinterpret_cast<const char *>(ptr), length);
  104. ASSERT_EQ(value, "ut_test");
  105. } else if (item.first == "key_bytes") {
  106. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Bytes);
  107. auto str_nums = pb_tensor.bytes_data_size();
  108. ASSERT_EQ(str_nums, 1);
  109. std::string value;
  110. size_t length;
  111. const uint8_t *ptr = nullptr;
  112. pb_tensor.get_bytes_data(0, &ptr, &length);
  113. value.resize(length);
  114. memcpy_s(value.data(), length, reinterpret_cast<const char *>(ptr), length);
  115. ASSERT_EQ(value, "ut_test");
  116. }
  117. }
  118. } else if (i == 1) {
  119. ASSERT_EQ(items.size(), 6);
  120. for (const auto &item : items) {
  121. ProtoTensor pb_tensor(const_cast<proto::Tensor *>(&item.second));
  122. auto shape = pb_tensor.shape();
  123. if (item.first == "key_int") {
  124. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Int32);
  125. ASSERT_EQ(shape.size(), 1);
  126. ASSERT_EQ(shape[0], 3);
  127. vector<int32_t> expected_value = {1, 2, 3};
  128. for (int i = 0; i < 3; i++) {
  129. const int32_t *data = reinterpret_cast<const int32_t *>(pb_tensor.data()) + i;
  130. ASSERT_EQ(*data, expected_value[i]);
  131. }
  132. } else if (item.first == "key_bool") {
  133. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Bool);
  134. ASSERT_EQ(shape.size(), 2);
  135. ASSERT_EQ(shape[0], 2);
  136. ASSERT_EQ(shape[1], 2);
  137. vector<vector<bool>> expected_value = {{true, false}, {false, true}};
  138. for (int i = 0; i < 2; i++) {
  139. for (int j = 0; j < 2; j++) {
  140. const bool *data = reinterpret_cast<const bool *>(pb_tensor.data()) + i * 2 + j;
  141. ASSERT_EQ(*data, expected_value[i][j]);
  142. }
  143. }
  144. } else if (item.first == "key_float") {
  145. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Float32);
  146. ASSERT_EQ(shape.size(), 2);
  147. ASSERT_EQ(shape[0], 1);
  148. ASSERT_EQ(shape[1], 2);
  149. vector<vector<float>> expected_value = {{1.1, 2.2}};
  150. for (int i = 0; i < 1; i++) {
  151. for (int j = 0; j < 2; j++) {
  152. const float *data = reinterpret_cast<const float *>(pb_tensor.data()) + i * 1 + j;
  153. ASSERT_FLOAT_EQ(*data, expected_value[i][j]);
  154. }
  155. }
  156. } else if (item.first == "key_str") {
  157. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_String);
  158. ASSERT_EQ(shape.size(), 1);
  159. ASSERT_EQ(shape[0], 1);
  160. auto str_nums = pb_tensor.bytes_data_size();
  161. ASSERT_EQ(str_nums, 1);
  162. std::string value;
  163. size_t length;
  164. const uint8_t *ptr = nullptr;
  165. pb_tensor.get_bytes_data(0, &ptr, &length);
  166. value.resize(length);
  167. memcpy_s(value.data(), length, reinterpret_cast<const char *>(ptr), length);
  168. ASSERT_EQ(value, "ut_test");
  169. } else if (item.first == "key_bytes") {
  170. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Bytes);
  171. auto str_nums = pb_tensor.bytes_data_size();
  172. ASSERT_EQ(str_nums, 1);
  173. std::string value;
  174. size_t length;
  175. const uint8_t *ptr = nullptr;
  176. pb_tensor.get_bytes_data(0, &ptr, &length);
  177. value.resize(length);
  178. memcpy_s(value.data(), length, reinterpret_cast<const char *>(ptr), length);
  179. ASSERT_EQ(value, "ut_test");
  180. }
  181. }
  182. } else if (i == 2) {
  183. ASSERT_EQ(items.size(), 6);
  184. for (const auto &item : items) {
  185. ProtoTensor pb_tensor(const_cast<proto::Tensor *>(&item.second));
  186. auto shape = pb_tensor.shape();
  187. if (item.first == "key_str_format1") {
  188. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_String);
  189. auto str_nums = pb_tensor.bytes_data_size();
  190. ASSERT_EQ(str_nums, 1);
  191. std::string value;
  192. size_t length;
  193. const uint8_t *ptr = nullptr;
  194. pb_tensor.get_bytes_data(0, &ptr, &length);
  195. value.resize(length);
  196. memcpy_s(value.data(), length, reinterpret_cast<const char *>(ptr), length);
  197. ASSERT_EQ(value, "ut_test");
  198. } else if (item.first == "key_str_format2") {
  199. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_String);
  200. auto str_nums = pb_tensor.bytes_data_size();
  201. ASSERT_EQ(str_nums, 1);
  202. std::string value;
  203. size_t length;
  204. const uint8_t *ptr = nullptr;
  205. pb_tensor.get_bytes_data(0, &ptr, &length);
  206. value.resize(length);
  207. memcpy_s(value.data(), length, reinterpret_cast<const char *>(ptr), length);
  208. ASSERT_EQ(value, "ut_test");
  209. } else if (item.first == "key_bytes_int16") {
  210. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Int16);
  211. ASSERT_EQ(shape.size(), 2);
  212. ASSERT_EQ(shape[0], 3);
  213. ASSERT_EQ(shape[1], 2);
  214. vector<vector<int16_t>> expected_value = {{1, 2}, {2, 3}, {3, 4}};
  215. for (int i = 0; i < 3; i++) {
  216. for (int j = 0; j < 2; j++) {
  217. const int16_t *data = reinterpret_cast<const int16_t *>(pb_tensor.data()) + i * 2 + j;
  218. ASSERT_FLOAT_EQ(*data, expected_value[i][j]);
  219. }
  220. }
  221. } else if (item.first == "key_bytes_fp16") {
  222. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Float16);
  223. ASSERT_EQ(shape.size(), 2);
  224. ASSERT_EQ(shape[0], 2);
  225. ASSERT_EQ(shape[1], 2);
  226. } else if (item.first == "key_bytes_bool") {
  227. ASSERT_EQ(pb_tensor.data_type(), DataType::kMSI_Bool);
  228. ASSERT_EQ(shape.size(), 2);
  229. ASSERT_EQ(shape[0], 1);
  230. ASSERT_EQ(shape[1], 2);
  231. vector<vector<bool>> expected_value = {{true, false}};
  232. for (int i = 0; i < 1; i++) {
  233. for (int j = 0; j < 2; j++) {
  234. const bool *data = reinterpret_cast<const bool *>(pb_tensor.data()) + i * 2 + j;
  235. ASSERT_FLOAT_EQ(*data, expected_value[i][j]);
  236. }
  237. }
  238. }
  239. }
  240. }
  241. }
  242. }
  243. TEST_F(TestParseInput, test_instances_empty_FAIL) {
  244. nlohmann::json js = R"(
  245. {"":
  246. {
  247. "key_tag":"scalar",
  248. "key_int": 1,
  249. "key_bool": false,
  250. "key_float": 2.3,
  251. "key_str": "ut_test",
  252. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes"}
  253. }
  254. }
  255. )"_json;
  256. struct evhttp_request *request = new evhttp_request();
  257. int size = 100;
  258. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  259. request_msg->request_message_ = js;
  260. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  261. proto::PredictRequest predict_request;
  262. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  263. RestfulService restful_service(dispatcher_);
  264. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  265. ASSERT_NE(status.StatusCode(), SUCCESS);
  266. }
  267. TEST_F(TestParseInput, test_instances_incorrect_FAIL) {
  268. nlohmann::json js = R"(
  269. {"instance":
  270. {
  271. "key_tag":"scalar",
  272. "key_int": 1,
  273. "key_bool": false,
  274. "key_float": 2.3,
  275. "key_str": "ut_test",
  276. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes"}
  277. }
  278. }
  279. )"_json;
  280. struct evhttp_request *request = new evhttp_request();
  281. int size = 100;
  282. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  283. request_msg->request_message_ = js;
  284. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  285. proto::PredictRequest predict_request;
  286. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  287. RestfulService restful_service(dispatcher_);
  288. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  289. ASSERT_NE(status.StatusCode(), SUCCESS);
  290. }
  291. TEST_F(TestParseInput, test_key_empty_FAIL) {
  292. nlohmann::json js = R"(
  293. {"instances":
  294. {
  295. "":"scalar",
  296. "key_int": 1,
  297. "key_bool": false,
  298. "key_float": 2.3,
  299. "key_str": "ut_test",
  300. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes"}
  301. }
  302. }
  303. )"_json;
  304. struct evhttp_request *request = new evhttp_request();
  305. int size = 100;
  306. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  307. request_msg->request_message_ = js;
  308. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  309. proto::PredictRequest predict_request;
  310. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  311. RestfulService restful_service(dispatcher_);
  312. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  313. ASSERT_NE(status.StatusCode(), SUCCESS);
  314. }
  315. TEST_F(TestParseInput, test_value_empty_FAIL) {
  316. nlohmann::json js = R"(
  317. {"instances":
  318. {
  319. "key_tag":"",
  320. "key_int": 1,
  321. "key_bool": false,
  322. "key_float": 2.3,
  323. "key_str": "ut_test",
  324. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes"}
  325. }
  326. }
  327. )"_json;
  328. struct evhttp_request *request = new evhttp_request();
  329. int size = 100;
  330. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  331. request_msg->request_message_ = js;
  332. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  333. proto::PredictRequest predict_request;
  334. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  335. RestfulService restful_service(dispatcher_);
  336. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  337. ASSERT_NE(status.StatusCode(), SUCCESS);
  338. }
  339. TEST_F(TestParseInput, test_obj_unknown_key_FAIL) {
  340. nlohmann::json js = R"(
  341. {"instances":
  342. {
  343. "key_tag":"",
  344. "key_int": 1,
  345. "key_bool": false,
  346. "key_float": 2.3,
  347. "key_str": "ut_test",
  348. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes", "type1":"bytes"}
  349. }
  350. }
  351. )"_json;
  352. struct evhttp_request *request = new evhttp_request();
  353. int size = 100;
  354. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  355. request_msg->request_message_ = js;
  356. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  357. proto::PredictRequest predict_request;
  358. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  359. RestfulService restful_service(dispatcher_);
  360. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  361. ASSERT_NE(status.StatusCode(), SUCCESS);
  362. }
  363. TEST_F(TestParseInput, test_obj_nob64_key_FAIL) {
  364. nlohmann::json js = R"(
  365. {"instances":
  366. {
  367. "key_tag":"",
  368. "key_int": 1,
  369. "key_bool": false,
  370. "key_float": 2.3,
  371. "key_str": "ut_test",
  372. "key_bytes": {"base64": "dXRfdGVzdA==", "type": "bytes"}
  373. }
  374. }
  375. )"_json;
  376. struct evhttp_request *request = new evhttp_request();
  377. int size = 100;
  378. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  379. request_msg->request_message_ = js;
  380. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  381. proto::PredictRequest predict_request;
  382. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  383. RestfulService restful_service(dispatcher_);
  384. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  385. ASSERT_NE(status.StatusCode(), SUCCESS);
  386. }
  387. TEST_F(TestParseInput, test_obj_illegal_b64value_FAIL) {
  388. nlohmann::json js = R"(
  389. {"instances":
  390. {
  391. "key_tag":"",
  392. "key_int": 1,
  393. "key_bool": false,
  394. "key_float": 2.3,
  395. "key_str": "ut_test",
  396. "key_bytes": {"base64": "dXRfdGVzdA", "type": "bytes"}
  397. }
  398. }
  399. )"_json;
  400. struct evhttp_request *request = new evhttp_request();
  401. int size = 100;
  402. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  403. request_msg->request_message_ = js;
  404. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  405. proto::PredictRequest predict_request;
  406. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  407. RestfulService restful_service(dispatcher_);
  408. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  409. ASSERT_NE(status.StatusCode(), SUCCESS);
  410. }
  411. TEST_F(TestParseInput, test_obj_unknown_type_FAIL) {
  412. nlohmann::json js = R"(
  413. {"instances":
  414. {
  415. "key_tag":"",
  416. "key_int": 1,
  417. "key_bool": false,
  418. "key_float": 2.3,
  419. "key_str": "ut_test",
  420. "key_bytes": {"base64": "dXRfdGVzdA==", "type": "INt"}
  421. }
  422. }
  423. )"_json;
  424. struct evhttp_request *request = new evhttp_request();
  425. int size = 100;
  426. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  427. request_msg->request_message_ = js;
  428. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  429. proto::PredictRequest predict_request;
  430. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  431. RestfulService restful_service(dispatcher_);
  432. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  433. ASSERT_NE(status.StatusCode(), SUCCESS);
  434. }
  435. TEST_F(TestParseInput, test_obj_error_shape_format_FAIL) {
  436. nlohmann::json js = R"(
  437. {"instances":
  438. {
  439. "key_tag":"",
  440. "key_int": 1,
  441. "key_bool": false,
  442. "key_float": 2.3,
  443. "key_str": "ut_test",
  444. "key_bytes_int16":{"b64":"AQACAAIAAwADAAQA", "type":"int16", "shape":3}
  445. }
  446. }
  447. )"_json;
  448. struct evhttp_request *request = new evhttp_request();
  449. int size = 100;
  450. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  451. request_msg->request_message_ = js;
  452. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  453. proto::PredictRequest predict_request;
  454. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  455. RestfulService restful_service(dispatcher_);
  456. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  457. ASSERT_NE(status.StatusCode(), SUCCESS);
  458. }
  459. TEST_F(TestParseInput, test_obj_error_shape_format2_FAIL) {
  460. nlohmann::json js = R"(
  461. {"instances":
  462. {
  463. "key_tag":"",
  464. "key_int": 1,
  465. "key_bool": false,
  466. "key_float": 2.3,
  467. "key_str": "ut_test",
  468. "key_bytes_int16":{"b64":"AQACAAIAAwADAAQA", "type":"int16", "shape":[[3],[2]]}
  469. }
  470. }
  471. )"_json;
  472. struct evhttp_request *request = new evhttp_request();
  473. int size = 100;
  474. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  475. request_msg->request_message_ = js;
  476. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  477. proto::PredictRequest predict_request;
  478. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  479. RestfulService restful_service(dispatcher_);
  480. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  481. ASSERT_NE(status.StatusCode(), SUCCESS);
  482. }
  483. TEST_F(TestParseInput, test_obj_error_shape_value_FAIL) {
  484. nlohmann::json js = R"(
  485. {"instances":
  486. {
  487. "key_tag":"",
  488. "key_int": 1,
  489. "key_bool": false,
  490. "key_float": 2.3,
  491. "key_str": "ut_test",
  492. "key_bytes_int16":{"b64":"AQACAAIAAwADAAQA", "type":"int16", "shape":[3.0,2.0]}
  493. }
  494. }
  495. )"_json;
  496. struct evhttp_request *request = new evhttp_request();
  497. int size = 100;
  498. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  499. request_msg->request_message_ = js;
  500. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  501. proto::PredictRequest predict_request;
  502. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  503. RestfulService restful_service(dispatcher_);
  504. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  505. ASSERT_NE(status.StatusCode(), SUCCESS);
  506. }
  507. TEST_F(TestParseInput, test_obj_error_shape_value2_FAIL) {
  508. nlohmann::json js = R"(
  509. {"instances":
  510. {
  511. "key_tag":"",
  512. "key_int": 1,
  513. "key_bool": false,
  514. "key_float": 2.3,
  515. "key_str": "ut_test",
  516. "key_bytes_int16":{"b64":"AQACAAIAAwADAAQA", "type":"int16", "shape":[3,3]}
  517. }
  518. }
  519. )"_json;
  520. struct evhttp_request *request = new evhttp_request();
  521. int size = 100;
  522. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  523. request_msg->request_message_ = js;
  524. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  525. proto::PredictRequest predict_request;
  526. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  527. RestfulService restful_service(dispatcher_);
  528. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  529. ASSERT_NE(status.StatusCode(), SUCCESS);
  530. }
  531. TEST_F(TestParseInput, test_obj_error_shape_value3_FAIL) {
  532. nlohmann::json js = R"(
  533. {"instances":
  534. {
  535. "key_tag":"",
  536. "key_int": 1,
  537. "key_bool": false,
  538. "key_float": 2.3,
  539. "key_str": "ut_test",
  540. "key_bytes_int16":{"b64":"AQACAAIAAwADAAQA", "type":"int16", "shape":[3,-2]}
  541. }
  542. }
  543. )"_json;
  544. struct evhttp_request *request = new evhttp_request();
  545. int size = 100;
  546. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  547. request_msg->request_message_ = js;
  548. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  549. proto::PredictRequest predict_request;
  550. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  551. RestfulService restful_service(dispatcher_);
  552. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  553. ASSERT_NE(status.StatusCode(), SUCCESS);
  554. }
  555. TEST_F(TestParseInput, test_tensor_value_empty_FAIL) {
  556. nlohmann::json js = R"(
  557. {"instances":
  558. {
  559. "key_tag":"tensor",
  560. "key_int": [],
  561. "key_bool":[[true, false], [false, true]],
  562. "key_float":[[1.1, 2.2]],
  563. "key_str":["ut_test"],
  564. "key_bytes":{"b64":"dXRfdGVzdA=="}
  565. }
  566. }
  567. )"_json;
  568. struct evhttp_request *request = new evhttp_request();
  569. int size = 100;
  570. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  571. request_msg->request_message_ = js;
  572. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  573. proto::PredictRequest predict_request;
  574. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  575. RestfulService restful_service(dispatcher_);
  576. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  577. ASSERT_NE(status.StatusCode(), SUCCESS);
  578. }
  579. TEST_F(TestParseInput, test_tensor_value_diff_type_FAIL) {
  580. nlohmann::json js = R"(
  581. {"instances":
  582. {
  583. "key_tag":"tensor",
  584. "key_int": [1, 2.0],
  585. "key_bool":[[true, false], [false, true]],
  586. "key_float":[[1.1, 2.2]],
  587. "key_str":["ut_test"],
  588. "key_bytes":{"b64":"dXRfdGVzdA=="}
  589. }
  590. }
  591. )"_json;
  592. struct evhttp_request *request = new evhttp_request();
  593. int size = 100;
  594. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  595. request_msg->request_message_ = js;
  596. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  597. proto::PredictRequest predict_request;
  598. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  599. RestfulService restful_service(dispatcher_);
  600. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  601. ASSERT_NE(status.StatusCode(), SUCCESS);
  602. }
  603. TEST_F(TestParseInput, test_tensor_value_diff_dimention_FAIL) {
  604. nlohmann::json js = R"(
  605. {"instances":
  606. {
  607. "key_tag":"tensor",
  608. "key_int": [1, 2],
  609. "key_bool":[[true, false], [false]],
  610. "key_float":[[1.1, 2.2]],
  611. "key_str":["ut_test"],
  612. "key_bytes":{"b64":"dXRfdGVzdA=="}
  613. }
  614. }
  615. )"_json;
  616. struct evhttp_request *request = new evhttp_request();
  617. int size = 100;
  618. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  619. request_msg->request_message_ = js;
  620. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  621. proto::PredictRequest predict_request;
  622. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  623. RestfulService restful_service(dispatcher_);
  624. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  625. ASSERT_NE(status.StatusCode(), SUCCESS);
  626. }
  627. TEST_F(TestParseInput, test_tensor_multi_object_FAIL) {
  628. nlohmann::json js = R"(
  629. {"instances":
  630. {
  631. "key_tag":"tensor",
  632. "key_int": [1, 2],
  633. "key_bool":[[true, false], [false, true]],
  634. "key_float":[[1.1, 2.2]],
  635. "key_str":["ut_test"],
  636. "key_bytes":[{"b64":"dXRfdGVzdA=="}, {"b64":"dXRfdGVzdA=="}]
  637. }
  638. }
  639. )"_json;
  640. struct evhttp_request *request = new evhttp_request();
  641. int size = 100;
  642. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  643. request_msg->request_message_ = js;
  644. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  645. proto::PredictRequest predict_request;
  646. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  647. RestfulService restful_service(dispatcher_);
  648. Status status = restful_service.ParseRequest(restful_request, &predict_request);
  649. ASSERT_NE(status.StatusCode(), SUCCESS);
  650. }
  651. TEST_F(TestParseReply, test_reply_SUCCESS) {
  652. nlohmann::json js = R"(
  653. {"instances":[
  654. {
  655. "key_tag":"scalar",
  656. "key_int": 1,
  657. "key_bool": false,
  658. "key_float": 2.3,
  659. "key_str": "ut_test",
  660. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes"}
  661. },
  662. {
  663. "key_tag":"tensor",
  664. "key_int": [1,2,3],
  665. "key_bool":[[true, false], [false, true]],
  666. "key_float":[[1.1, 2.2]],
  667. "key_str":["ut_test"]
  668. }
  669. ]
  670. }
  671. )"_json;
  672. struct evhttp_request *request = new evhttp_request();
  673. int size = 100;
  674. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  675. request_msg->request_message_ = js;
  676. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  677. proto::PredictRequest predict_request;
  678. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  679. RestfulService restful_service(dispatcher_);
  680. Status status(INVALID_INPUTS);
  681. status = restful_service.ParseRequest(restful_request, &predict_request);
  682. ASSERT_EQ(status.StatusCode(), SUCCESS);
  683. nlohmann::json out_js;
  684. proto::PredictReply reply;
  685. auto instance_ptr = reply.add_instances();
  686. auto &map_item = *(instance_ptr->mutable_items());
  687. // test scalar:
  688. // scalar:key_int
  689. proto::Tensor tensor_int;
  690. ProtoTensor pb_tensor_int(&tensor_int);
  691. DataType type_int = kMSI_Int32;
  692. pb_tensor_int.set_data_type(type_int);
  693. pb_tensor_int.set_shape({1});
  694. pb_tensor_int.resize_data(pb_tensor_int.GetTypeSize(type_int));
  695. auto data_int = reinterpret_cast<int32_t *>(pb_tensor_int.mutable_data());
  696. *data_int = 1;
  697. map_item["key_int"] = tensor_int;
  698. // scalar: key_bool
  699. proto::Tensor tensor_bool;
  700. ProtoTensor pb_tensor_bool(&tensor_bool);
  701. DataType type_bool = kMSI_Bool;
  702. pb_tensor_bool.set_data_type(type_bool);
  703. pb_tensor_bool.resize_data(pb_tensor_bool.GetTypeSize(type_bool));
  704. auto data_bool = reinterpret_cast<bool *>(pb_tensor_bool.mutable_data());
  705. *data_bool = false;
  706. map_item["key_bool"] = tensor_bool;
  707. // scalar: key_float
  708. proto::Tensor tensor_float;
  709. ProtoTensor pb_tensor_float(&tensor_float);
  710. DataType type_float = kMSI_Float32;
  711. pb_tensor_float.set_data_type(type_float);
  712. pb_tensor_float.set_shape({1});
  713. pb_tensor_float.resize_data(pb_tensor_float.GetTypeSize(type_float));
  714. auto data_float = reinterpret_cast<float *>(pb_tensor_float.mutable_data());
  715. *data_float = 2.3;
  716. map_item["key_float"] = tensor_float;
  717. // scalar: key_str
  718. string value = "ut_test";
  719. proto::Tensor tensor_str;
  720. ProtoTensor pb_tensor_str(&tensor_str);
  721. DataType type_str = kMSI_String;
  722. pb_tensor_str.set_data_type(type_str);
  723. pb_tensor_str.add_bytes_data(reinterpret_cast<uint8_t *>(value.data()), value.length());
  724. map_item["key_str"] = tensor_str;
  725. // scalar: key_bytes
  726. string value_bytes = "ut_test";
  727. proto::Tensor tensor_bytes;
  728. ProtoTensor pb_tensor_bytes(&tensor_bytes);
  729. DataType type_bytes = kMSI_Bytes;
  730. pb_tensor_bytes.set_data_type(type_bytes);
  731. pb_tensor_bytes.add_bytes_data(reinterpret_cast<uint8_t *>(value_bytes.data()), value_bytes.length());
  732. map_item["key_bytes"] = tensor_bytes;
  733. // test tensor:
  734. auto instance_ptr2 = reply.add_instances();
  735. auto &map_item2 = *(instance_ptr2->mutable_items());
  736. // tensor int:
  737. vector<int32_t> tensor_value_int = {1, 2, 3};
  738. proto::Tensor tensor_int2;
  739. ProtoTensor pb_tensor_int2(&tensor_int2);
  740. DataType type_int2 = kMSI_Int32;
  741. pb_tensor_int2.set_data_type(type_int2);
  742. pb_tensor_int2.set_shape({3});
  743. pb_tensor_int2.resize_data(pb_tensor_int2.GetTypeSize(type_int2) * 3);
  744. for (int i = 0; i < 3; i++) {
  745. auto data_int2 = reinterpret_cast<int32_t *>(pb_tensor_int2.mutable_data()) + i;
  746. *data_int2 = tensor_value_int[i];
  747. }
  748. map_item2["key_int"] = tensor_int2;
  749. // tensor: key_bool
  750. vector<vector<bool>> tensor_value_bool = {{true, false}, {false, true}};
  751. proto::Tensor tensor_bool2;
  752. ProtoTensor pb_tensor_bool2(&tensor_bool2);
  753. DataType type_bool2 = kMSI_Bool;
  754. pb_tensor_bool2.set_data_type(type_bool2);
  755. pb_tensor_bool2.set_shape({2, 2});
  756. pb_tensor_bool2.resize_data(pb_tensor_bool2.GetTypeSize(type_bool2) * 4);
  757. for (int i = 0; i < 2; i++) {
  758. for (int j = 0; j < 2; j++) {
  759. auto data_bool2 = reinterpret_cast<bool *>(pb_tensor_bool2.mutable_data()) + i * 2 + j;
  760. *data_bool2 = tensor_value_bool[i][j];
  761. }
  762. }
  763. map_item2["key_bool"] = tensor_bool2;
  764. // tensor: key_float
  765. vector<vector<float>> tensor_value_float = {{1.1, 2.2}};
  766. proto::Tensor tensor_float2;
  767. ProtoTensor pb_tensor_float2(&tensor_float2);
  768. DataType type_float2 = kMSI_Float32;
  769. pb_tensor_float2.set_data_type(type_float2);
  770. pb_tensor_float2.set_shape({1, 2});
  771. pb_tensor_float2.resize_data(pb_tensor_float2.GetTypeSize(type_float2) * 2);
  772. for (int i = 0; i < 1; i++) {
  773. for (int j = 0; j < 2; j++) {
  774. auto data_float2 = reinterpret_cast<float *>(pb_tensor_float2.mutable_data()) + i * 1 + j;
  775. *data_float2 = tensor_value_float[i][j];
  776. }
  777. }
  778. map_item2["key_float"] = tensor_float2;
  779. // tensor: key_str
  780. vector<string> tensor_value_str = {"ut_test", "ut_test2"};
  781. proto::Tensor tensor_str2;
  782. ProtoTensor pb_tensor_str2(&tensor_str2);
  783. DataType type_str2 = kMSI_String;
  784. pb_tensor_str2.set_data_type(type_str2);
  785. pb_tensor_str2.set_shape({2});
  786. for (int i = 0; i < 2; i++) {
  787. pb_tensor_str2.add_bytes_data(reinterpret_cast<uint8_t *>(tensor_value_str[i].data()),
  788. tensor_value_str[i].length());
  789. }
  790. map_item2["key_str"] = tensor_str2;
  791. Status status2 = restful_service.ParseReply(reply, &out_js);
  792. ASSERT_EQ(status2.StatusCode(), SUCCESS);
  793. string out_str = out_js.dump();
  794. std::cout << "Parse reply out:" << out_str << std::endl;
  795. ASSERT_TRUE(out_js.is_object());
  796. for (auto &item : out_js.items()) {
  797. ASSERT_EQ(item.key(), "instances");
  798. ASSERT_TRUE(item.value().is_array());
  799. ASSERT_EQ(item.value().size(), 2);
  800. int sum = 0;
  801. // array
  802. for (auto &element : item.value()) {
  803. ASSERT_TRUE(element.is_object());
  804. if (element.size() == 5) {
  805. int count = 0;
  806. // object
  807. std::cout << "===start====" << std::endl;
  808. for (auto &it : element.items()) {
  809. if (it.key() == "key_int") {
  810. ASSERT_EQ(it.value(), 1);
  811. count++;
  812. } else if (it.key() == "key_bool") {
  813. ASSERT_EQ(it.value(), false);
  814. count++;
  815. } else if (it.key() == "key_float") {
  816. ASSERT_FLOAT_EQ(it.value(), 2.3);
  817. count++;
  818. } else if (it.key() == "key_str") {
  819. ASSERT_EQ(it.value(), "ut_test");
  820. count++;
  821. } else if (it.key() == "key_bytes") {
  822. ASSERT_TRUE(it.value().is_object());
  823. ASSERT_EQ(it.value()["b64"], "dXRfdGVzdA==");
  824. count++;
  825. }
  826. }
  827. ASSERT_EQ(count, 5);
  828. sum++;
  829. } else if (element.size() == 4) {
  830. int count = 0;
  831. // object
  832. for (auto &it : element.items()) {
  833. if (it.key() == "key_int") {
  834. ASSERT_TRUE(it.value().is_array());
  835. ASSERT_EQ(it.value().size(), 3);
  836. ASSERT_EQ(it.value()[0], 1);
  837. ASSERT_EQ(it.value()[1], 2);
  838. ASSERT_EQ(it.value()[2], 3);
  839. count++;
  840. } else if (it.key() == "key_bool") {
  841. ASSERT_TRUE(it.value().is_array());
  842. ASSERT_EQ(it.value().size(), 2);
  843. ASSERT_TRUE(it.value()[0].is_array());
  844. ASSERT_EQ(it.value()[0].size(), 2);
  845. ASSERT_EQ(it.value()[0][0], true);
  846. ASSERT_EQ(it.value()[0][1], false);
  847. ASSERT_EQ(it.value()[1].size(), 2);
  848. ASSERT_EQ(it.value()[1][0], false);
  849. ASSERT_EQ(it.value()[1][1], true);
  850. count++;
  851. } else if (it.key() == "key_float") {
  852. ASSERT_TRUE(it.value().is_array());
  853. ASSERT_EQ(it.value().size(), 1);
  854. ASSERT_TRUE(it.value()[0].is_array());
  855. ASSERT_EQ(it.value()[0].size(), 2);
  856. ASSERT_FLOAT_EQ(it.value()[0][0], 1.1);
  857. ASSERT_FLOAT_EQ(it.value()[0][1], 2.2);
  858. count++;
  859. } else if (it.key() == "key_str") {
  860. ASSERT_TRUE(it.value().is_array());
  861. ASSERT_EQ(it.value().size(), 2);
  862. ASSERT_EQ(it.value()[0], "ut_test");
  863. ASSERT_EQ(it.value()[1], "ut_test2");
  864. count++;
  865. }
  866. }
  867. ASSERT_EQ(count, 4);
  868. sum++;
  869. }
  870. }
  871. ASSERT_EQ(sum, 2);
  872. }
  873. }
  874. TEST_F(TestParseReply, test_reply_instances_num_not_match_FAIL) {
  875. nlohmann::json js = R"(
  876. {"instances":[
  877. {
  878. "key_tag":"scalar",
  879. "key_int": 1,
  880. "key_bool": false,
  881. "key_float": 2.3,
  882. "key_str": "ut_test",
  883. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes"}
  884. }
  885. ]
  886. }
  887. )"_json;
  888. struct evhttp_request *request = new evhttp_request();
  889. int size = 100;
  890. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  891. request_msg->request_message_ = js;
  892. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  893. proto::PredictRequest predict_request;
  894. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  895. RestfulService restful_service(dispatcher_);
  896. Status status(INVALID_INPUTS);
  897. status = restful_service.ParseRequest(restful_request, &predict_request);
  898. ASSERT_EQ(status.StatusCode(), SUCCESS);
  899. nlohmann::json out_js;
  900. proto::PredictReply reply;
  901. auto instance_ptr = reply.add_instances();
  902. auto &map_item = *(instance_ptr->mutable_items());
  903. // test scalar:
  904. // scalar:key_int
  905. proto::Tensor tensor_int;
  906. ProtoTensor pb_tensor_int(&tensor_int);
  907. DataType type_int = kMSI_Int32;
  908. pb_tensor_int.set_data_type(type_int);
  909. pb_tensor_int.set_shape({1});
  910. pb_tensor_int.resize_data(pb_tensor_int.GetTypeSize(type_int));
  911. auto data_int = reinterpret_cast<int32_t *>(pb_tensor_int.mutable_data());
  912. *data_int = 1;
  913. map_item["key_int"] = tensor_int;
  914. // scalar: key_bool
  915. proto::Tensor tensor_bool;
  916. ProtoTensor pb_tensor_bool(&tensor_bool);
  917. DataType type_bool = kMSI_Bool;
  918. pb_tensor_bool.set_data_type(type_bool);
  919. pb_tensor_bool.resize_data(pb_tensor_bool.GetTypeSize(type_bool));
  920. auto data_bool = reinterpret_cast<bool *>(pb_tensor_bool.mutable_data());
  921. *data_bool = false;
  922. map_item["key_bool"] = tensor_bool;
  923. // scalar: key_float
  924. proto::Tensor tensor_float;
  925. ProtoTensor pb_tensor_float(&tensor_float);
  926. DataType type_float = kMSI_Float32;
  927. pb_tensor_float.set_data_type(type_float);
  928. pb_tensor_float.set_shape({1});
  929. pb_tensor_float.resize_data(pb_tensor_float.GetTypeSize(type_float));
  930. auto data_float = reinterpret_cast<float *>(pb_tensor_float.mutable_data());
  931. *data_float = 2.3;
  932. map_item["key_float"] = tensor_float;
  933. // scalar: key_str
  934. string value = "ut_test";
  935. proto::Tensor tensor_str;
  936. ProtoTensor pb_tensor_str(&tensor_str);
  937. DataType type_str = kMSI_String;
  938. pb_tensor_str.set_data_type(type_str);
  939. pb_tensor_str.add_bytes_data(reinterpret_cast<uint8_t *>(value.data()), value.length());
  940. map_item["key_str"] = tensor_str;
  941. // scalar: key_bytes
  942. string value_bytes = "ut_test";
  943. proto::Tensor tensor_bytes;
  944. ProtoTensor pb_tensor_bytes(&tensor_bytes);
  945. DataType type_bytes = kMSI_Bytes;
  946. pb_tensor_bytes.set_data_type(type_bytes);
  947. pb_tensor_bytes.add_bytes_data(reinterpret_cast<uint8_t *>(value_bytes.data()), value_bytes.length());
  948. map_item["key_bytes"] = tensor_bytes;
  949. // test tensor:
  950. auto instance_ptr2 = reply.add_instances();
  951. auto &map_item2 = *(instance_ptr2->mutable_items());
  952. // tensor int:
  953. vector<int32_t> tensor_value_int = {1, 2, 3};
  954. proto::Tensor tensor_int2;
  955. ProtoTensor pb_tensor_int2(&tensor_int2);
  956. DataType type_int2 = kMSI_Int32;
  957. pb_tensor_int2.set_data_type(type_int2);
  958. pb_tensor_int2.set_shape({3});
  959. pb_tensor_int2.resize_data(pb_tensor_int2.GetTypeSize(type_int2) * 3);
  960. for (int i = 0; i < 3; i++) {
  961. auto data_int2 = reinterpret_cast<int32_t *>(pb_tensor_int2.mutable_data()) + i;
  962. *data_int2 = tensor_value_int[i];
  963. }
  964. map_item2["key_int"] = tensor_int2;
  965. // tensor: key_bool
  966. vector<vector<bool>> tensor_value_bool = {{true, false}, {false, true}};
  967. proto::Tensor tensor_bool2;
  968. ProtoTensor pb_tensor_bool2(&tensor_bool2);
  969. DataType type_bool2 = kMSI_Bool;
  970. pb_tensor_bool2.set_data_type(type_bool2);
  971. pb_tensor_bool2.set_shape({2, 2});
  972. pb_tensor_bool2.resize_data(pb_tensor_bool2.GetTypeSize(type_bool2) * 4);
  973. for (int i = 0; i < 2; i++) {
  974. for (int j = 0; j < 2; j++) {
  975. auto data_bool2 = reinterpret_cast<bool *>(pb_tensor_bool2.mutable_data()) + i * 2 + j;
  976. *data_bool2 = tensor_value_bool[i][j];
  977. }
  978. }
  979. map_item2["key_bool"] = tensor_bool2;
  980. // tensor: key_float
  981. vector<vector<float>> tensor_value_float = {{1.1, 2.2}};
  982. proto::Tensor tensor_float2;
  983. ProtoTensor pb_tensor_float2(&tensor_float2);
  984. DataType type_float2 = kMSI_Float32;
  985. pb_tensor_float2.set_data_type(type_float2);
  986. pb_tensor_float2.set_shape({1, 2});
  987. pb_tensor_float2.resize_data(pb_tensor_float2.GetTypeSize(type_float2) * 2);
  988. for (int i = 0; i < 1; i++) {
  989. for (int j = 0; j < 2; j++) {
  990. auto data_float2 = reinterpret_cast<float *>(pb_tensor_float2.mutable_data()) + i * 1 + j;
  991. *data_float2 = tensor_value_float[i][j];
  992. }
  993. }
  994. map_item2["key_float"] = tensor_float2;
  995. // tensor: key_str
  996. vector<string> tensor_value_str = {"ut_test", "ut_test2"};
  997. proto::Tensor tensor_str2;
  998. ProtoTensor pb_tensor_str2(&tensor_str2);
  999. DataType type_str2 = kMSI_String;
  1000. pb_tensor_str2.set_data_type(type_str2);
  1001. pb_tensor_str2.set_shape({2});
  1002. for (int i = 0; i < 2; i++) {
  1003. pb_tensor_str2.add_bytes_data(reinterpret_cast<uint8_t *>(tensor_value_str[i].data()),
  1004. tensor_value_str[i].length());
  1005. }
  1006. map_item2["key_str"] = tensor_str2;
  1007. Status status2 = restful_service.ParseReply(reply, &out_js);
  1008. ASSERT_NE(status2.StatusCode(), SUCCESS);
  1009. }
  1010. TEST_F(TestParseReply, test_reply_error_num_not_match_FAIL) {
  1011. nlohmann::json js = R"(
  1012. {"instances":[
  1013. {
  1014. "key_tag":"scalar",
  1015. "key_int": 1,
  1016. "key_bool": false,
  1017. "key_float": 2.3,
  1018. "key_str": "ut_test",
  1019. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes"}
  1020. }
  1021. ]
  1022. }
  1023. )"_json;
  1024. struct evhttp_request *request = new evhttp_request();
  1025. int size = 100;
  1026. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  1027. request_msg->request_message_ = js;
  1028. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  1029. proto::PredictRequest predict_request;
  1030. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  1031. RestfulService restful_service(dispatcher_);
  1032. Status status(INVALID_INPUTS);
  1033. status = restful_service.ParseRequest(restful_request, &predict_request);
  1034. ASSERT_EQ(status.StatusCode(), SUCCESS);
  1035. nlohmann::json out_js;
  1036. proto::PredictReply reply;
  1037. auto error_msg = reply.add_error_msg();
  1038. error_msg->set_error_msg("error1");
  1039. auto error_msg2 = reply.add_error_msg();
  1040. error_msg2->set_error_msg("error2");
  1041. Status status2 = restful_service.ParseReply(reply, &out_js);
  1042. ASSERT_NE(status2.StatusCode(), SUCCESS);
  1043. }
  1044. TEST_F(TestParseReply, test_reply_type_not_set_FAIL) {
  1045. nlohmann::json js = R"(
  1046. {"instances":[
  1047. {
  1048. "key_tag":"scalar",
  1049. "key_int": 1,
  1050. "key_bool": false,
  1051. "key_float": 2.3,
  1052. "key_str": "ut_test",
  1053. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes"}
  1054. }
  1055. ]
  1056. }
  1057. )"_json;
  1058. struct evhttp_request *request = new evhttp_request();
  1059. int size = 100;
  1060. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  1061. request_msg->request_message_ = js;
  1062. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  1063. proto::PredictRequest predict_request;
  1064. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  1065. RestfulService restful_service(dispatcher_);
  1066. Status status(INVALID_INPUTS);
  1067. status = restful_service.ParseRequest(restful_request, &predict_request);
  1068. ASSERT_EQ(status.StatusCode(), SUCCESS);
  1069. nlohmann::json out_js;
  1070. proto::PredictReply reply;
  1071. auto instance_ptr = reply.add_instances();
  1072. auto &map_item = *(instance_ptr->mutable_items());
  1073. // test scalar:
  1074. // scalar:key_int
  1075. proto::Tensor tensor_int;
  1076. ProtoTensor pb_tensor_int(&tensor_int);
  1077. pb_tensor_int.set_shape({1});
  1078. pb_tensor_int.resize_data(pb_tensor_int.GetTypeSize(kMSI_Int32));
  1079. auto data_int = reinterpret_cast<int32_t *>(pb_tensor_int.mutable_data());
  1080. *data_int = 1;
  1081. map_item["key_int"] = tensor_int;
  1082. Status status2 = restful_service.ParseReply(reply, &out_js);
  1083. ASSERT_NE(status2.StatusCode(), SUCCESS);
  1084. }
  1085. TEST_F(TestParseReply, test_reply_type_fp16_FAIL) {
  1086. nlohmann::json js = R"(
  1087. {"instances":[
  1088. {
  1089. "key_tag":"scalar",
  1090. "key_int": 1,
  1091. "key_bool": false,
  1092. "key_float": 2.3,
  1093. "key_str": "ut_test",
  1094. "key_bytes": {"b64": "dXRfdGVzdA==", "type": "bytes"}
  1095. }
  1096. ]
  1097. }
  1098. )"_json;
  1099. struct evhttp_request *request = new evhttp_request();
  1100. int size = 100;
  1101. std::shared_ptr<DecomposeEvRequest> request_msg = std::make_shared<DecomposeEvRequest>(request, size);
  1102. request_msg->request_message_ = js;
  1103. std::shared_ptr<RestfulRequest> restful_request = std::make_shared<RestfulRequest>(request_msg);
  1104. proto::PredictRequest predict_request;
  1105. std::shared_ptr<Dispatcher> dispatcher_ = Server::Instance().GetDispatcher();
  1106. RestfulService restful_service(dispatcher_);
  1107. Status status(INVALID_INPUTS);
  1108. status = restful_service.ParseRequest(restful_request, &predict_request);
  1109. ASSERT_EQ(status.StatusCode(), SUCCESS);
  1110. nlohmann::json out_js;
  1111. proto::PredictReply reply;
  1112. auto instance_ptr = reply.add_instances();
  1113. auto &map_item = *(instance_ptr->mutable_items());
  1114. // test scalar:
  1115. // scalar: key_float
  1116. proto::Tensor tensor_float;
  1117. ProtoTensor pb_tensor_float(&tensor_float);
  1118. DataType type_float = kMSI_Float16;
  1119. pb_tensor_float.set_data_type(type_float);
  1120. pb_tensor_float.set_shape({1});
  1121. pb_tensor_float.resize_data(pb_tensor_float.GetTypeSize(type_float));
  1122. map_item["key_float16"] = tensor_float;
  1123. Status status2 = restful_service.ParseReply(reply, &out_js);
  1124. ASSERT_NE(status2.StatusCode(), SUCCESS);
  1125. }
  1126. } // namespace serving
  1127. } // namespace mindspore

A lightweight and high-performance service module that helps MindSpore developers efficiently deploy online inference services in the production environment.