ge_executor.cc 40 kB

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "executor/ge_executor.h"

#include <cce/cce.h>
#include <cce/compiler_stub.h>
#include <ctime>
#include <iostream>

#include "common/debug/log.h"
#include "common/ge/ge_util.h"
#include "common/helper/model_helper.h"
#include "common/profiling/profiling_manager.h"
#include "common/dump/dump_manager.h"
#include "common/util.h"
#include "framework/common/debug/ge_log.h"
#include "framework/common/util.h"
#include "graph/execute/graph_execute.h"
#include "graph/load/graph_loader.h"
#include "graph/load/new_model_manager/davinci_model_parser.h"
#include "graph/load/new_model_manager/model_manager.h"
#include "graph/manager/graph_mem_allocator.h"
#include "graph/model.h"
#include "graph/utils/graph_utils.h"
#include "mmpa/mmpa_api.h"
#include "single_op/single_op_manager.h"
#include "graph/manager/graph_var_manager.h"
#include "graph/load/new_model_manager/davinci_model.h"

using std::string;
using std::vector;

namespace {
const size_t kDynamicBatchSizeVecSize = 1;
const size_t kStaticBatchInfoSize = 1;
const size_t kDynamicImageSizeVecSize = 2;
const size_t kDynamicImageSizeInputSize = 2;
const char *const kBatchLabel = "Batch_";

ge::Status TransferDomiErrorCode(const uint32_t errorCode) {
  switch (errorCode) {
    case ge::PARAM_INVALID:
    case domi::PARAM_INVALID:
      return ge::PARAM_INVALID;
    case ge::INTERNAL_ERROR:
    case domi::INTERNAL_ERROR:
      return ge::INTERNAL_ERROR;
    default:
      return ge::FAILED;
  }
}

void GetGeTensorDescFromDomiInfo(std::vector<ge::TensorDesc> &ge_descs,
                                 const std::vector<ge::InputOutputDescInfo> &domi_descs,
                                 const std::vector<uint32_t> &formats) {
  uint32_t idx = 0;
  for (auto desc_item : domi_descs) {
    ge::TensorDesc ge_desc;
    ge_desc.SetName(desc_item.name);
    ge_desc.SetDataType(static_cast<ge::DataType>(desc_item.data_type));
    ge_desc.SetFormat(static_cast<ge::Format>(formats[idx]));
    std::vector<int64_t> shape_dims;
    for (auto dim : desc_item.shape_info.dims) {
      shape_dims.push_back(dim);
    }
    ge::Shape ge_shape(shape_dims);
    ge_desc.SetShape(ge_shape);
    ge_desc.SetSize(desc_item.size);
    ge_descs.emplace_back(ge_desc);
    ++idx;
  }
}

void GetDomiInputData(const ge::RunModelData &input_data, ge::InputData &inputs) {
  inputs.index = input_data.index;
  inputs.model_id = input_data.modelId;
  inputs.timestamp = input_data.timestamp;
  inputs.timeout = input_data.timeout;
  inputs.request_id = input_data.request_id;
  for (const auto &data_item : input_data.blobs) {
    ge::DataBuffer dataBuf{data_item.data, data_item.length, data_item.isDataSupportMemShare};
    inputs.blobs.emplace_back(dataBuf);
  }
}

void GetDomiOutputData(const ge::RunModelData &output_data, ge::OutputData &outputs) {
  outputs.index = output_data.index;
  outputs.model_id = output_data.modelId;
  for (const auto &data_item : output_data.blobs) {
    ge::DataBuffer dataBuf(data_item.data, data_item.length, data_item.isDataSupportMemShare);
    outputs.blobs.emplace_back(dataBuf);
  }
}

void SetDynamicInputDataFlag(const ge::RunModelData &input_data, const std::vector<std::vector<int64_t>> batch_info,
                             ge::InputData &inputs) {
  inputs.is_dynamic_batch = true;
  std::string batch_label;
  size_t match_idx = 0;
  for (size_t i = 0; i < batch_info.size(); ++i) {
    // dynamic_dims
    if (input_data.dynamic_dims.size() != 0) {
      bool is_match = true;
      for (size_t j = 0; j < static_cast<size_t>(input_data.dynamic_dims.size()); ++j) {
        if (static_cast<uint64_t>(batch_info[i][j]) != input_data.dynamic_dims[j]) {
          is_match = false;
          break;
        }
      }
      if (is_match) {
        match_idx = i;
        break;
      }
      // dynamic_batch_size
    } else if (batch_info[i].size() == kDynamicBatchSizeVecSize &&
               batch_info[i][0] == static_cast<int64_t>(input_data.dynamic_batch_size)) {
      match_idx = i;
      break;
      // dynamic_image_size
    } else if (batch_info[i].size() == kDynamicImageSizeVecSize &&
               batch_info[i][0] == static_cast<int64_t>(input_data.dynamic_image_height) &&
               batch_info[i][1] == static_cast<int64_t>(input_data.dynamic_image_width)) {
      match_idx = i;
      break;
    }
  }
  batch_label = kBatchLabel + std::to_string(match_idx);
  inputs.batch_label = batch_label;
  GELOGI("current batch label:%s", batch_label.c_str());
}

bool IsDynamicBatchSizeMatchModel(uint64_t batch_size, const vector<std::vector<int64_t>> &batch_info) {
  if (batch_info.empty()) {
    GELOGE(ge::FAILED, "Dynamic batch info is empty.");
    return false;
  }
  for (auto batch : batch_info) {
    if (batch.size() != kDynamicBatchSizeVecSize) {
      GELOGE(ge::FAILED, "Dynamic batch param num is %zu, current batch size is %zu.", kDynamicBatchSizeVecSize,
             batch.size());
      return false;
    }
    if (batch[0] == static_cast<int64_t>(batch_size)) {
      return true;
    }
  }
  GELOGE(ge::FAILED, "Dynamic batch %lu can not match the gear of model.", batch_size);
  return false;
}

bool IsDynamicImageSizeMatchModel(uint64_t image_height, uint64_t image_width,
                                  const vector<std::vector<int64_t>> &batch_info) {
  if (batch_info.empty()) {
    GELOGE(ge::FAILED, "Dynamic batch info is empty.");
    return false;
  }
  for (auto resolution : batch_info) {
    if (resolution.size() != kDynamicImageSizeVecSize) {
      GELOGE(ge::FAILED, "Dynamic resolution param num is %zu, current resolution size is %zu.",
             kDynamicImageSizeVecSize, resolution.size());
      return false;
    }
    if (resolution[0] == static_cast<int64_t>(image_height) && resolution[1] == static_cast<int64_t>(image_width)) {
      return true;
    }
  }
  GELOGE(ge::FAILED, "Dynamic resolution (%lu,%lu) can not match the gear of model.", image_height, image_width);
  return false;
}
}  // namespace

namespace ge {
bool GeExecutor::isInit_ = false;

class ModelListenerAdapter : public ModelListener {
 public:
  domi::Status OnComputeDone(uint32_t model_id, uint32_t dataIndex, uint32_t resultCode,
                             std::vector<ge::OutputTensorInfo> &outputs) {
    if (listener == nullptr) {
      GELOGE(ge::FAILED, "listener is null.");
      return FAILED;
    }
    return listener->OnComputeDone(model_id, dataIndex, resultCode, outputs);
  }

  std::shared_ptr<ge::ModelListener> listener;
};

GeExecutor::GeExecutor() {}

Status GeExecutor::Initialize() {
  GELOGI("Init GeExecutor begin.");
  if (isInit_) {
    GELOGW("Already initialized, no need to be initialized again.");
    return ge::SUCCESS;
  }
  std::vector<rtMemType_t> mem_type(1, RT_MEMORY_HBM);
  auto ret = MemManager::Instance().Initialize(mem_type);
  if (ret != SUCCESS) {
    GELOGE(ret, "Memory Manager init failed.");
    return ret;
  }
  // Start profiling
  Options profiling_options;
  profiling_options.device_id = 0;
  profiling_options.job_id = "";
  ProfilingManager::Instance().Init(profiling_options);
  isInit_ = true;
  GELOGI("Init GeExecutor over.");
  return ge::SUCCESS;
}

Status GeExecutor::Finalize() {
  GELOGI("Uninit GeExecutor begin.");
  if (isInit_ == false) {
    GELOGW("GeExecutor has not been initialized.");
    return ge::SUCCESS;
  }
  // Stop profiling
  if (ProfilingManager::Instance().ProfilingOn()) {
    ProfilingManager::Instance().StopProfiling();
    ProfilingManager::Instance().PluginUnInit(GE_PROFILING_MODULE);
  }
  GELOGI("Uninit GeExecutor over.");
  return ge::SUCCESS;
}

Status GeExecutor::SetDynamicBatchSize(uint32_t model_id, void *dynamic_input_addr, uint64_t length,
                                       uint64_t batch_size) {
  if (dynamic_input_addr == nullptr) {
    GELOGE(PARAM_INVALID, "Dynamic input addr is nullptr!");
    return PARAM_INVALID;
  }
  uint64_t size = sizeof(uint64_t);
  if (length < size) {
    GELOGE(PARAM_INVALID, "Dynamic input size [%lu] is less than [%lu]!", length, size);
    return PARAM_INVALID;
  }
  // Verify whether the input dynamic batch matches the model gear
  std::vector<std::vector<int64_t>> batch_info;
  std::vector<uint64_t> batch_num{batch_size};
  int32_t dynamic_type = static_cast<int32_t>(FIXED);
  Status ret = GraphExecutor::GetDynamicBatchInfo(model_id, batch_info, dynamic_type);
  if (ret != SUCCESS) {
    GELOGE(ret, "Get dynamic input info failed.");
    return ret;
  }
  if (!IsDynamicBatchSizeMatchModel(batch_size, batch_info)) {
    GELOGE(PARAM_INVALID, "The current dynamic input does not match the gear of the model.");
    return PARAM_INVALID;
  }
  ret = GraphExecutor::SetDynamicSize(model_id, batch_num, static_cast<int32_t>(DYNAMIC_BATCH));
  if (ret != SUCCESS) {
    GELOGE(ret, "Set dynamic size failed");
    return ret;
  }
  // memcpy dynamic_batch_size from host to device
  rtError_t rt_ret = rtMemcpy(dynamic_input_addr, length, &batch_size, size, RT_MEMCPY_HOST_TO_DEVICE);
  if (rt_ret != RT_ERROR_NONE) {
    GELOGE(RT_FAILED, "memcpy dynamic batch input data failed! ret: 0x%X", rt_ret);
    return RT_ERROR_TO_GE_STATUS(rt_ret);
  }
  return SUCCESS;
}

Status GeExecutor::SetDynamicImageSize(uint32_t model_id, void *dynamic_input_addr, uint64_t length,
                                       uint64_t image_height, uint64_t image_width) {
  if (dynamic_input_addr == nullptr) {
    GELOGE(PARAM_INVALID, "Dynamic input addr is nullptr!");
    return PARAM_INVALID;
  }
  uint64_t dynamic_input_size = kDynamicImageSizeInputSize * sizeof(uint64_t);
  if (length < dynamic_input_size) {
    GELOGE(PARAM_INVALID, "Dynamic input size [%lu] is less than [%lu]!", length, dynamic_input_size);
    return PARAM_INVALID;
  }
  // Verify whether the input dynamic resolution matches the model gear
  std::vector<std::vector<int64_t>> batch_info;
  std::vector<uint64_t> batch_num{image_height, image_width};
  int32_t dynamic_type = static_cast<int32_t>(FIXED);
  Status ret = GraphExecutor::GetDynamicBatchInfo(model_id, batch_info, dynamic_type);
  if (ret != SUCCESS) {
    GELOGE(ret, "Get dynamic input info failed.");
    return ret;
  }
  if (!IsDynamicImageSizeMatchModel(image_height, image_width, batch_info)) {
    GELOGE(PARAM_INVALID, "The current dynamic input does not match the gear of the model.");
    return PARAM_INVALID;
  }
  ret = GraphExecutor::SetDynamicSize(model_id, batch_num, static_cast<int32_t>(DYNAMIC_IMAGE));
  if (ret != SUCCESS) {
    GELOGE(ret, "Set dynamic size failed");
    return ret;
  }
  // Memcpy dynamic resolution height from host to device
  rtError_t rt_ret =
      rtMemcpy(dynamic_input_addr, sizeof(uint64_t), &image_height, sizeof(uint64_t), RT_MEMCPY_HOST_TO_DEVICE);
  if (rt_ret != RT_ERROR_NONE) {
    GELOGE(RT_FAILED, "memcpy dynamic resolution input data failed! ret: 0x%X", rt_ret);
    return RT_ERROR_TO_GE_STATUS(rt_ret);
  }
  uint64_t remain_size = length - sizeof(uint64_t);
  // Memcpy dynamic resolution width from host to device
  if (rtMemcpy(reinterpret_cast<void *>(reinterpret_cast<uint8_t *>(dynamic_input_addr) + sizeof(uint64_t)),
               remain_size, &image_width, sizeof(uint64_t), RT_MEMCPY_HOST_TO_DEVICE) != RT_ERROR_NONE) {
    GELOGE(FAILED, "memcpy dynamic resolution input data failed!");
    return FAILED;
  }
  return SUCCESS;
}

Status GeExecutor::SetDynamicDims(uint32_t model_id, void *dynamic_input_addr, uint64_t length,
                                  const vector<uint64_t> &dynamic_dims) {
  if (dynamic_input_addr == nullptr) {
    GELOGE(FAILED, "Dynamic input addr is nullptr!");
    return FAILED;
  }
  Status ret = GraphExecutor::SetDynamicSize(model_id, dynamic_dims, static_cast<int32_t>(DYNAMIC_DIMS));
  if (ret != SUCCESS) {
    GELOGE(FAILED, "Set dynamic size failed");
    return FAILED;
  }
  vector<uint64_t> cur_dynamic_dims;
  std::vector<ge::TensorDesc> input_desc;
  std::vector<ge::TensorDesc> output_desc;
  ret = GetModelDescInfo(model_id, input_desc, output_desc);
  if (ret != ge::SUCCESS) {
    GELOGE(FAILED, "GetModelDescInfo failed.");
    return FAILED;
  }
  vector<string> user_designate_shape_order;
  vector<int64_t> all_data_dims;
  ret = GetUserDesignateShapeOrder(model_id, user_designate_shape_order);
  if (ret != ge::SUCCESS) {
    GELOGE(FAILED, "GetUserDesignateShapeOrder failed.");
    return FAILED;
  }
  for (auto &data_name : user_designate_shape_order) {
    for (size_t j = 0; j < input_desc.size(); ++j) {
      if (input_desc.at(j).GetName() == data_name) {
        for (auto dim : input_desc.at(j).GetShape().GetDims()) {
          all_data_dims.push_back(dim);
        }
        break;
      }
    }
  }
  if (dynamic_dims.size() != all_data_dims.size()) {
    GELOGE(FAILED, "Dynamic input size [%lu] is not equal with all data dims size [%lu]!", dynamic_dims.size(),
           all_data_dims.size());
    return FAILED;
  }
  for (std::size_t i = 0; i < all_data_dims.size(); ++i) {
    if (all_data_dims[i] < 0) {
      cur_dynamic_dims.push_back(dynamic_dims[i]);
    }
  }
  size_t dynamic_dim_num = cur_dynamic_dims.size();
  uint64_t dynamic_input_size = static_cast<uint64_t>(dynamic_dim_num * sizeof(uint64_t));
  if (length < dynamic_input_size) {
    GELOGE(FAILED, "Dynamic input size [%lu] is less than [%lu]!", length, dynamic_input_size);
    return FAILED;
  }
  for (uint32_t i = 0; i < dynamic_dim_num; ++i) {
    // Memcpy dynamic dim[i] from host to device
    if (rtMemcpy(reinterpret_cast<void *>(reinterpret_cast<uint8_t *>(dynamic_input_addr) + sizeof(uint64_t) * i),
                 length - sizeof(uint64_t) * i, &cur_dynamic_dims[i], sizeof(uint64_t),
                 RT_MEMCPY_HOST_TO_DEVICE) != RT_ERROR_NONE) {
      GELOGE(FAILED, "memcpy dynamic resolution input data failed!");
      return FAILED;
    }
  }
  return SUCCESS;
}

Status GeExecutor::GetCurDynamicDims(uint32_t model_id, const vector<uint64_t> &combined_dims,
                                     vector<uint64_t> &cur_dynamic_dims) {
  vector<vector<int64_t>> combined_batch;
  if (GraphExecutor::GetCombinedDynamicDims(model_id, combined_batch) != SUCCESS) {
    GELOGE(FAILED, "Get combined dynamic dims info failed.");
    return FAILED;
  }
  if (combined_batch.empty()) {
    GELOGE(FAILED, "Combined dynamic dims is empty.");
    return FAILED;
  }
  if (combined_dims.size() != combined_batch[0].size()) {
    GELOGE(FAILED, "Input dynamic dims's dimension size[%zu] is different from model[%zu].", combined_dims.size(),
           combined_batch[0].size());
    return FAILED;
  }
  bool matched = false;
  size_t idx = 0;
  for (size_t i = 0; i < combined_batch.size(); i++) {
    bool is_match = true;
    for (size_t j = 0; j < combined_dims.size(); j++) {
      if (combined_dims[j] != static_cast<uint64_t>(combined_batch[i][j])) {
        is_match = false;
        break;
      }
    }
    if (is_match) {
      idx = i;
      matched = true;
      break;
    }
  }
  if (!matched) {
    GELOGE(FAILED, "Input dynamic dims can not match model.");
    return FAILED;
  }
  // batch_info save the dynamic info of combined_dims
  vector<vector<int64_t>> batch_info;
  int32_t dynamic_type = static_cast<int32_t>(FIXED);
  if (GraphExecutor::GetDynamicBatchInfo(model_id, batch_info, dynamic_type) != SUCCESS) {
    GELOGE(FAILED, "Get dynamic input info failed.");
    return FAILED;
  }
  cur_dynamic_dims.clear();
  for (size_t i = 0; i < batch_info[idx].size(); i++) {
    cur_dynamic_dims.emplace_back(static_cast<uint64_t>(batch_info[idx][i]));
  }
  return SUCCESS;
}

Status GeExecutor::GetCurShape(const uint32_t model_id, std::vector<int64_t> &batch_info, int32_t &dynamic_type) {
  GELOGI("Begin to get current shape");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  Status ret = GraphExecutor::GetCurShape(model_id, batch_info, dynamic_type);
  if (ret != SUCCESS) {
    GELOGE(ret, "Get current shape failed");
    return ret;
  }
  return SUCCESS;
}

Status GeExecutor::SetDynamicAippData(uint32_t model_id, void *dynamic_input_addr, uint64_t length,
                                      const std::vector<kAippDynamicBatchPara> &aippBatchPara,
                                      const kAippDynamicPara &aippParms) {
  GELOGI("Enter to SetDynamicAippData.");
  if (dynamic_input_addr == nullptr) {
    GELOGE(PARAM_INVALID, "Dynamic aipp input addr is nullptr!");
    return PARAM_INVALID;
  }
  if (aippBatchPara.empty()) {
    GELOGE(PARAM_INVALID, "aippBatchPara is empty.");
    return PARAM_INVALID;
  }
  uint64_t batch_num = aippBatchPara.size();
  uint64_t real_aippParms_size = sizeof(kAippDynamicPara) - sizeof(kAippDynamicBatchPara);
  uint64_t struct_len = batch_num * sizeof(kAippDynamicBatchPara) + real_aippParms_size;
  GELOGI(
      "Get acl input dynamic aipp data, model_id is %u, length is %lu,"
      "batch num is %lu, struct_len is %lu",
      model_id, length, batch_num, struct_len);
  if (struct_len > length) {
    GELOGE(PARAM_INVALID, "input dynamic aipp param len [%lu] is larger than aipp_data size [%lu]", struct_len, length);
    return PARAM_INVALID;
  }
  // Memcpy real kAippDynamicBatchPara from host to device
  rtError_t rt_ret = rtMemcpy(dynamic_input_addr, length, &aippParms, real_aippParms_size, RT_MEMCPY_HOST_TO_DEVICE);
  if (rt_ret != RT_ERROR_NONE) {
    GELOGE(RT_FAILED, "memcpy real_aippParms_size failed! ret: 0x%X", rt_ret);
    return RT_ERROR_TO_GE_STATUS(rt_ret);
  }
  uint64_t remain_len = length - real_aippParms_size;
  uint8_t *aipp_batch_para_dev = reinterpret_cast<uint8_t *>(dynamic_input_addr) + real_aippParms_size;
  for (uint64_t i = 0; i < batch_num; ++i) {
    rt_ret = rtMemcpy(reinterpret_cast<void *>(aipp_batch_para_dev + i * sizeof(kAippDynamicBatchPara)),
                      (remain_len - i * sizeof(kAippDynamicBatchPara)), &(aippBatchPara[i]),
                      sizeof(kAippDynamicBatchPara), RT_MEMCPY_HOST_TO_DEVICE);
    if (rt_ret != RT_ERROR_NONE) {
      GELOGE(RT_FAILED, "memcpy kAippDynamicBatchPara input data failed! ret: 0x%X", rt_ret);
      return RT_ERROR_TO_GE_STATUS(rt_ret);
    }
  }
  return SUCCESS;
}

// Load model
Status GeExecutor::LoadModelOffline(uint32_t &model_id, const std::string &path, const std::string &key,
                                    int32_t priority, std::shared_ptr<ge::ModelListener> listener) {
  GELOGI("load model offline begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  string filePath = RealPath(path.c_str());
  if (filePath.empty()) {
    GELOGE(ge::FAILED, "File path is invalid. please check your text file '%s'.", path.c_str());
    return ge::FAILED;
  }
  std::shared_ptr<ModelListenerAdapter> listener_adapter = MakeShared<ModelListenerAdapter>();
  if (listener_adapter == nullptr) {
    GELOGE(MEMALLOC_FAILED, "ModelListenerAdapter make shared failed!");
    return ge::FAILED;
  }
  listener_adapter->listener = listener;
  Status ret = GraphLoader::LoadModelFromFile(path, key, priority, listener_adapter, model_id);
  if (ret != SUCCESS) {
    GELOGE(ret, "[GeExecutor] LoadModelFromFile failed");
    return TransferDomiErrorCode(ret);
  }
  return SUCCESS;
}

Status GeExecutor::LoadModel(uint32_t &model_id, const ModelData &model_data,
                             std::shared_ptr<ge::ModelListener> listener) {
  GELOGI("Load model begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  std::shared_ptr<ModelListenerAdapter> listener_adapter = MakeShared<ModelListenerAdapter>();
  if (listener_adapter == nullptr) {
    GELOGE(MEMALLOC_FAILED, "ModelListenerAdapter make shared failed!");
    return ge::FAILED;
  }
  listener_adapter->listener = listener;
  Status ret = GraphLoader::LoadModel(model_data, listener_adapter, model_id);
  if (ret != SUCCESS) {
    GELOGE(ret, "[GeExecutor] LoadModel failed.");
    return TransferDomiErrorCode(ret);
  }
  return ret;
}

Status GeExecutor::UnloadModel(uint32_t model_id) {
  GELOGI("unload model %u begin.", model_id);
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  Status ret = GraphLoader::DestroyAicpuSessionForInfer(model_id);
  if (ret != SUCCESS) {
    GELOGE(ret, "[GraphLoader] DestroyAicpuSessionForInfer failed. model id: %u", model_id);
    return FAILED;
  }
  std::shared_ptr<DavinciModel> davinci_model = ModelManager::GetInstance()->GetModel(model_id);
  if (davinci_model != nullptr) {
    uint64_t session_id = davinci_model->GetSessionId();
    VarManagerPool::Instance().RemoveVarManager(session_id);
  }
  return GraphLoader::UnloadModel(model_id);
}

Status GeExecutor::RunModel(const ge::RunModelData &input_data, ge::RunModelData &output_data) {
  GELOGI("run model begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  InputData inputs;
  GetDomiInputData(input_data, inputs);
  OutputData outputs;
  GetDomiOutputData(output_data, outputs);
  return GraphExecutor::DataInput(inputs, outputs);
}

// Get input and output descriptor
Status GeExecutor::GetModelDescInfo(uint32_t model_id, std::vector<ge::TensorDesc> &input_desc,
                                    std::vector<ge::TensorDesc> &output_desc, bool new_model_desc) {
  GELOGI("get model desc info begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  std::vector<InputOutputDescInfo> input_desc_infos;
  std::vector<InputOutputDescInfo> output_desc_infos;
  std::vector<uint32_t> input_formats;
  std::vector<uint32_t> output_formats;
  Status ret = GraphExecutor::GetInputOutputDescInfo(model_id, input_desc_infos, output_desc_infos, input_formats,
                                                     output_formats, new_model_desc);
  if (ret != domi::SUCCESS) {
    GELOGE(ret, "GetInputOutputDescInfo failed. ret = %u", ret);
    return ret;
  }
  if (input_formats.size() != input_desc_infos.size()) {
    GELOGE(ge::PARAM_INVALID, "input_formats size %zu is not equal to input_desc_infos size %zu.",
           input_formats.size(), input_desc_infos.size());
    return ge::PARAM_INVALID;
  }
  if (output_formats.size() != output_desc_infos.size()) {
    GELOGE(ge::PARAM_INVALID, "output_formats size %zu is not equal to output_desc_infos size %zu.",
           output_formats.size(), output_desc_infos.size());
    return ge::PARAM_INVALID;
  }
  // Transfer data to TensorDesc
  GetGeTensorDescFromDomiInfo(input_desc, input_desc_infos, input_formats);
  GetGeTensorDescFromDomiInfo(output_desc, output_desc_infos, output_formats);
  GELOGI("get model desc info end.");
  return ge::SUCCESS;
}

///
/// @ingroup ge
/// @brief Get dynamic batch_info
/// @param [in] model_id
/// @param [out] batch_info
/// @param [out] dynamic_type
/// @return execute result
///
Status GeExecutor::GetDynamicBatchInfo(uint32_t model_id, std::vector<std::vector<int64_t>> &batch_info,
                                       int32_t &dynamic_type) {
  GELOGI("Begin to get dynamic batch info.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  Status ret = GraphExecutor::GetDynamicBatchInfo(model_id, batch_info, dynamic_type);
  if (ret != SUCCESS) {
    GELOGE(ret, "GetDynamicBatchInfo failed.");
    return ret;
  }
  GELOGI("Get dynamic batch info succ.");
  return SUCCESS;
}

///
/// @ingroup ge
/// @brief Get combined dynamic dims info
/// @param [in] model_id
/// @param [out] batch_info
/// @return execute result
///
Status GeExecutor::GetCombinedDynamicDims(uint32_t model_id, vector<vector<int64_t>> &batch_info) {
  GELOGI("Begin to get combined dynamic dims info.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  Status ret = GraphExecutor::GetCombinedDynamicDims(model_id, batch_info);
  if (ret != SUCCESS) {
    GELOGE(ret, "GetCombinedDynamicDims failed.");
    return ret;
  }
  GELOGI("Get combined dynamic dims succ.");
  return SUCCESS;
}

///
/// @ingroup ge
/// @brief Get user designate shape order
/// @param [in] model_id
/// @param [out] user_designate_shape_order
/// @return execute result
///
Status GeExecutor::GetUserDesignateShapeOrder(uint32_t model_id, vector<string> &user_designate_shape_order) {
  GELOGI("Begin to get user designate shape info.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  Status ret = GraphExecutor::GetUserDesignateShapeOrder(model_id, user_designate_shape_order);
  if (ret != SUCCESS) {
    GELOGE(ret, "GetUserDesignateShapeOrder failed.");
    return ret;
  }
  GELOGI("Get user designate shape order succ.");
  return SUCCESS;
}

///
/// @ingroup ge
/// @brief Get AIPP input format
/// @param [in] model_id
/// @param [in] index
/// @param [out] input_format
/// @return execute result
///
Status GeExecutor::GetAIPPInfo(uint32_t model_id, uint32_t index, AippConfigInfo &aipp_info) {
  GELOGI("Begin to GetAIPPInfo.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "not inited yet!");
    return GE_EXEC_NOT_INIT;
  }
  Status ret = GraphExecutor::GetAIPPInfo(model_id, index, aipp_info);
  if (ret != SUCCESS) {
    GELOGW("GetAIPPInfo is not success.");
    return ret;
  }
  GELOGI("GetAIPPInfo succ.");
  return SUCCESS;
}

Status GeExecutor::GetModelAttr(uint32_t model_id, std::vector<std::string> &dynamic_output_shape_info) {
  GELOGI("Begin to get dynamic batch output shape info");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "not inited yet!");
    return GE_EXEC_NOT_INIT;
  }
  Status ret = GraphExecutor::GetModelAttr(model_id, dynamic_output_shape_info);
  if (ret != SUCCESS) {
    GELOGE(ret, "Get dynamic batch output shape info failed.");
    return ret;
  }
  GELOGI("Get dynamic batch output shape info succ.");
  return SUCCESS;
}

Status GeExecutor::GetModelDescInfoForZeroCopy(uint32_t model_id, std::vector<ge::TensorDesc> &input_desc,
                                               std::vector<TensorDesc> &output_desc) {
  GELOGI("get model desc info for zero copy begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  std::vector<InputOutputDescInfo> input_desc_infos;
  std::vector<InputOutputDescInfo> output_desc_infos;
  std::vector<uint32_t> input_formats;
  std::vector<uint32_t> output_formats;
  Status ret = GraphExecutor::GetInputOutputDescInfoForZeroCopy(model_id, input_desc_infos, output_desc_infos,
                                                                input_formats, output_formats);
  if (ret != domi::SUCCESS) {
    GELOGE(ret, "Get DescInfo from zero copy failed. ret = %u", ret);
    return TransferDomiErrorCode(ret);
  }
  if (input_formats.size() != input_desc_infos.size()) {
    GELOGE(ge::FAILED, "input_formats.size() != input_desc_infos.size().");
    return ge::FAILED;
  }
  if (output_formats.size() != output_desc_infos.size()) {
    GELOGE(ge::FAILED, "output_formats.size() != output_desc_infos.size().");
    return ge::FAILED;
  }
  GetGeTensorDescFromDomiInfo(input_desc, input_desc_infos, input_formats);
  GetGeTensorDescFromDomiInfo(output_desc, output_desc_infos, output_formats);
  GELOGI("get model desc info from zero copy end.");
  return ge::SUCCESS;
}

Status GeExecutor::CommandHandle(const Command &command) {
  GELOGI("command handle begin.");
  Status ret = GraphLoader::CommandHandle(command);
  if (ret != SUCCESS) {
    GELOGE(ret, "CommandHandle: Command Handle failed.");
    return TransferDomiErrorCode(ret);
  }
  return SUCCESS;
}

Status GeExecutor::GetMaxUsedMemory(uint32_t model_id, uint32_t &max_size) {
  GELOGI("Get max used memory begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  uint64_t max_mem_size = 0;
  Status ret = GraphLoader::GetMaxUsedMemory(model_id, max_mem_size);
  max_size = static_cast<uint32_t>(max_mem_size);
  return ret;
}

/**
 * @ingroup ge
 * @brief Load data from model file to memory
 * @param [in] const std::string &path: Offline model file path
 * @param [out] domi::ModelData &model_data: Offline model memory data
 * @return SUCCESS handle successfully / others handle failed
 */
Status GeExecutor::LoadDataFromFile(const std::string &path, ModelData &model_data) {
  GELOGI("Load data from file begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  string filePath = RealPath(path.c_str());
  if (filePath.empty()) {
    GELOGE(GE_EXEC_MODEL_PATH_INVALID, "File path is invalid. please check your text file '%s'.", path.c_str());
    return GE_EXEC_MODEL_PATH_INVALID;
  }
  GELOGI("load modelData from file: %s.", path.c_str());
  std::string key_path;
  int32_t priority = 0;
  Status ret = GraphLoader::LoadDataFromFile(path, key_path, priority, model_data);
  if (ret != SUCCESS) {
    if (model_data.model_data != nullptr) {
      delete[] static_cast<char *>(model_data.model_data);
      model_data.model_data = nullptr;
    }
  }
  return ret;
}

/**
 * @ingroup ge
 * @brief Load model from offline model memory data
 * @param [in] domi::ModelData &model_data: Offline model data
 *             void *dev_ptr: Input/Output memory start address
 *             size_t memsize: Input/Output memory length
 *             void *weight_ptr: Weight memory start address
 *             size_t weightsize: Weight memory length
 * @param [out] uint32_t &model_id: identification after model loading
 * @return SUCCESS handle successfully / others handle failed
 */
Status GeExecutor::LoadModelFromData(uint32_t &model_id, const ModelData &model_data, void *dev_ptr, size_t mem_size,
                                     void *weight_ptr, size_t weight_size) {
  GELOGI("Load model from data begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "not inited yet!");
    return GE_EXEC_NOT_INIT;
  }
  return GraphLoader::LoadModelFromData(model_id, model_data, dev_ptr, mem_size, weight_ptr, weight_size);
}

/**
 * @ingroup ge
 * @brief Load task list from ModelData with queue.
 * @param [out] model_id: model id allocate from manager.
 * @param [in] ge_model_data: Model data load from offline model.
 * @param [in] input_queue_ids: input queue ids create from user.
 * @param [in] output_queue_ids: output queue ids create from user.
 * @return: 0 for success / others for fail
 */
Status GeExecutor::LoadModelWithQ(uint32_t &model_id, const ModelData &model_data,
                                  const std::vector<uint32_t> &input_queue_ids,
                                  const std::vector<uint32_t> &output_queue_ids) {
  GELOGI("Load model with queue begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  return GraphLoader::LoadModelWithQ(model_id, model_data, input_queue_ids, output_queue_ids);
}

/**
 * @ingroup ge
 * @brief Synchronous execution of offline model (Do not create thread)
 * @param [in] uint32_t model_id: Model ID to execute
 *             void *stream: stream to execute
 *             const domi::InputData *input_data: Model input data
 *             bool async_mode: is asynchronous mode
 * @param [out] domi::OutputData *output_data: Model output data
 * @return SUCCESS handle successfully / others handle failed
 */
Status GeExecutor::ExecModel(uint32_t model_id, void *stream, const ge::RunModelData &run_input_data,
                             ge::RunModelData &run_output_data, bool async_mode) {
  GELOGI("Execute model begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  InputData input_data;
  OutputData output_data;
  GetDomiInputData(run_input_data, input_data);
  GetDomiOutputData(run_output_data, output_data);
  if ((run_input_data.dynamic_batch_size != 0) || (run_input_data.dynamic_image_width != 0) ||
      (run_input_data.dynamic_image_height != 0) || (run_input_data.dynamic_dims.size() != 0)) {
    std::vector<std::vector<int64_t>> batch_info;
    int32_t dynamic_type = static_cast<int32_t>(FIXED);
    Status ret = GraphExecutor::GetDynamicBatchInfo(model_id, batch_info, dynamic_type);
    if (ret != SUCCESS) {
      GELOGE(ret, "Get dynamic input info failed.");
      return ret;
    }
    if (dynamic_type == static_cast<int32_t>(DYNAMIC_DIMS)) {
      ret = GraphExecutor::GetCombinedDynamicDims(model_id, batch_info);
      if (ret != SUCCESS) {
        GELOGE(FAILED, "Get dynamic input info failed.");
        return FAILED;
      }
    }
    if (!batch_info.empty()) {
      SetDynamicInputDataFlag(run_input_data, batch_info, input_data);
    }
  }
  return GraphLoader::ExecuteModel(model_id, stream, async_mode, input_data, output_data);
}

/**
 * @ingroup ge
 * @brief Get weight memory size from model file
 * @param [in] const std::string &path: Offline model file path
 * @param [out] size_t &mem_size: Execution memory size
 *              size_t &weight_size: Weight memory space size
 * @return SUCCESS handle successfully / others handle failed
 */
Status GeExecutor::GetMemAndWeightSize(const std::string &path, size_t &mem_size, size_t &weight_size) {
  GELOGI("Get memory and weight size from file begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  ModelData model;
  std::string key;
  Status ret = ge::GraphLoader::LoadDataFromFile(path, key, 0, model);
  if ((ret != SUCCESS) || (model.model_data == nullptr)) {
    GELOGE(ret, "Load data from file failed. ret = %d", ret);
    return ret;
  }
  ret = ge::ModelManager::GetModelMemAndWeightSize(model, mem_size, weight_size);
  delete[] static_cast<char *>(model.model_data);
  model.model_data = nullptr;
  return ret;
}

/**
 * @ingroup ge
 * @brief Get weight memory size from model buffer
 * @param [in] const void *model_data: Offline model buffer
 *             size_t model_size: Offline model buffer length
 * @param [out] size_t &mem_size: Execution memory size
 *              size_t &weight_size: Weight memory space size
 * @return SUCCESS handle successfully / others handle failed
 */
Status GeExecutor::GetMemAndWeightSize(const void *model_data, size_t model_size, size_t &mem_size,
                                       size_t &weight_size) {
  GELOGI("Get memory and weight size from data begin.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "GeExecutor has not been initialized!");
    return GE_EXEC_NOT_INIT;
  }
  if (model_data == nullptr) {
    GELOGE(PARAM_INVALID, "invalid model data!");
    return PARAM_INVALID;
  }
  ModelData model;
  model.model_data = const_cast<void *>(model_data);
  model.model_len = static_cast<uint32_t>(model_size);
  return ge::ModelManager::GetModelMemAndWeightSize(model, mem_size, weight_size);
}

Status GeExecutor::LoadSingleOp(const std::string &model_name, const ge::ModelData &modelData, void *stream,
                                SingleOp **single_op) {
  return SingleOpManager::GetInstance().GetOpFromModel(model_name, modelData, stream, single_op);
}

Status GeExecutor::LoadDynamicSingleOp(const std::string &model_name, const ge::ModelData &modelData, void *stream,
                                       DynamicSingleOp **single_op) {
  return SingleOpManager::GetInstance().GetDynamicOpFromModel(model_name, modelData, stream, single_op);
}

Status GeExecutor::ExecuteAsync(SingleOp *executor, const std::vector<DataBuffer> &inputs,
                                std::vector<DataBuffer> &outputs) {
  if (executor == nullptr) {
    GELOGE(PARAM_INVALID, "param is NULL");
    return PARAM_INVALID;
  }
  return executor->ExecuteAsync(inputs, outputs);
}

ge::Status GeExecutor::ExecuteAsync(DynamicSingleOp *executor, const vector<GeTensorDesc> &input_desc,
                                    const vector<DataBuffer> &inputs, vector<GeTensorDesc> &output_desc,
                                    vector<DataBuffer> &outputs) {
  GE_CHECK_NOTNULL(executor);
  return executor->ExecuteAsync(input_desc, inputs, output_desc, outputs);
}

Status GeExecutor::ReleaseSingleOpResource(void *stream) {
  return SingleOpManager::GetInstance().ReleaseResource(stream);
}

Status GeExecutor::GetBatchInfoSize(uint32_t model_id, size_t &shape_count) {
  std::vector<std::vector<int64_t>> batch_info;
  int32_t dynamic_type = static_cast<int32_t>(FIXED);
  Status ret = GetDynamicBatchInfo(model_id, batch_info, dynamic_type);
  if (ret != SUCCESS) {
    GELOGE(ret, "Calc batch info size failed. ret = %d", ret);
    return ret;
  }
  if (batch_info.empty()) {
    shape_count = kStaticBatchInfoSize;
  } else {
    shape_count = batch_info.size();
  }
  return SUCCESS;
}

Status GeExecutor::GetOrigInputInfo(uint32_t model_id, uint32_t index, OriginInputInfo &orig_input_info) {
  GELOGI("Begin to GetOrigInputInfo.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "not inited yet!");
    return GE_EXEC_NOT_INIT;
  }
  Status ret = GraphExecutor::GetOrigInputInfo(model_id, index, orig_input_info);
  if (ret != SUCCESS) {
    GELOGE(ret, "GetOrigInputInfo failed.");
    return ret;
  }
  GELOGI("GetOrigInputInfo succ.");
  return SUCCESS;
}

Status GeExecutor::GetAllAippInputOutputDims(uint32_t model_id, uint32_t index,
                                             std::vector<InputOutputDims> &input_dims,
                                             std::vector<InputOutputDims> &output_dims) {
  GELOGI("Begin to GetAllAippInputOutputDims.");
  if (!isInit_) {
    GELOGE(GE_EXEC_NOT_INIT, "not inited yet!");
    return GE_EXEC_NOT_INIT;
  }
  Status ret = GraphExecutor::GetAllAippInputOutputDims(model_id, index, input_dims, output_dims);
  if (ret != SUCCESS) {
    GELOGE(ret, "GetAllAippInputOutputDims failed.");
    return ret;
  }
  GELOGI("GetAllAippInputOutputDims succ.");
  return SUCCESS;
}

Status GeExecutor::GetOpDescInfo(uint32_t device_id, uint32_t stream_id, uint32_t task_id, OpDescInfo &op_desc_info) {
  GELOGI("Begin to GetOpDescInfo.");
  Status ret = GraphExecutor::GetOpDescInfo(device_id, stream_id, task_id, op_desc_info);
  if (ret != SUCCESS) {
    GELOGE(ret, "GetOpDescInfo failed.");
    return ret;
  }
  GELOGI("GetOpDescInfo succ.");
  return SUCCESS;
}

Status GeExecutor::SetDump(const DumpConfig &dump_config) {
  GELOGI("Start to set dump config");
  auto ret = DumpManager::GetInstance().SetDumpConf(dump_config);
  if (ret != SUCCESS) {
    GELOGE(ret, "Set dump conf failed");
    return ret;
  }
  GELOGI("Set dump config succ.");
  return SUCCESS;
}
}  // namespace ge

The Graph Engine (GE) module is a submodule of MindSpore implemented in C++. It sits between the front-end module ME and the underlying hardware, acting as the bridge between the two. GE takes the graph delivered by ME as input, applies a series of deep graph optimizations, and outputs a graph that can run efficiently on the underlying hardware. GE performs optimizations tailored to the hardware architecture of the Ascend AI processor in order to fully exploit its compute power. During model training and inference, GE is invoked automatically and is transparent to the user.
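For orientation, the following is a minimal usage sketch of the GeExecutor interface defined in ge_executor.cc, wired together only from the methods shown above (Initialize, LoadDataFromFile, LoadModelFromData, ExecModel, UnloadModel, Finalize). The model path, the null device/weight pointers passed to LoadModelFromData, and the blob setup are illustrative assumptions, not the canonical calling sequence; a real caller must allocate device memory for the input/output blobs through the runtime before execution.

// Minimal sketch, assuming "model.om" is a hypothetical offline model file and
// that device buffers for the blobs are allocated elsewhere by the caller.
#include "executor/ge_executor.h"

int RunOfflineModelOnce() {
  ge::GeExecutor executor;
  if (executor.Initialize() != ge::SUCCESS) {
    return -1;
  }

  // Load the offline model file into host memory, then load it for execution.
  ge::ModelData model_data;
  if (executor.LoadDataFromFile("model.om", model_data) != ge::SUCCESS) {
    return -1;
  }
  uint32_t model_id = 0;
  // Passing null device/weight pointers here is an assumption; callers may
  // instead hand in pre-allocated feature-map and weight memory.
  if (executor.LoadModelFromData(model_id, model_data, nullptr, 0, nullptr, 0) != ge::SUCCESS) {
    return -1;
  }

  // Describe input/output blobs; the data pointers must reference device
  // memory owned by the caller (omitted here).
  ge::RunModelData input;
  ge::RunModelData output;
  // input.blobs.push_back({device_input_ptr, input_size, false});
  // output.blobs.push_back({device_output_ptr, output_size, false});

  ge::Status ret = executor.ExecModel(model_id, nullptr /*stream*/, input, output, false /*async_mode*/);

  (void)executor.UnloadModel(model_id);
  (void)executor.Finalize();
  return (ret == ge::SUCCESS) ? 0 : -1;
}

Note that model_data.model_data is a buffer allocated by the loader; as the error path of LoadDataFromFile suggests, releasing it once the model has been loaded appears to be the caller's responsibility.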