You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

graphdef_transform.cc 12 kB

5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
#include "tools/converter/graphdef_transform.h"
#include <algorithm>
#include <new>
#include <string>
#include "schema/model_generated.h"
#include "src/common/log_adapter.h"
#include "tools/converter/converter_flags.h"
#include "tools/converter/legacy_optimizer/graph/dtype_trans_pass.h"
#include "tools/converter/legacy_optimizer/fusion/format_trans_fusion_pass.h"
#include "tools/converter/legacy_optimizer/fusion/quant_cast_fusion_pass.h"
#include "tools/converter/legacy_optimizer/fusion/mul_add_fusion_pass.h"
#include "tools/converter/legacy_optimizer/graph/trans_format_remove_pass.h"
#include "tools/converter/legacy_optimizer/graph/infershape_pass.h"
#include "tools/converter/legacy_optimizer/graph/batchnorm_convert_scale_pass.h"
#include "tools/converter/legacy_optimizer/graph/format_trans_pass.h"
#include "tools/converter/legacy_optimizer/graph/trans_format_insert_pass.h"
#include "tools/converter/legacy_optimizer/graph/global_format_transform_pass.h"
#include "tools/converter/legacy_optimizer/graph/isolated_node_remove_pass.h"
#include "tools/converter/legacy_optimizer/graph/unused_node_remove_pass.h"
#include "tools/converter/legacy_optimizer/graph/dropout_node_remove_pass.h"
#include "tools/converter/legacy_optimizer/graph/topological_sort_pass.h"
#include "tools/converter/legacy_optimizer/graph/tensor_quant_pass.h"
#include "tools/converter/legacy_optimizer/graph/tensor_name_pass.h"
#include "tools/converter/legacy_optimizer/graph/infer_quant_param_pass.h"
#include "tools/converter/legacy_optimizer/graph/set_unused_quant_param_to_default_pass.h"
#include "tools/converter/legacy_optimizer/graph/switch_pass.h"
#include "tools/converter/legacy_optimizer/graph/select_pass.h"
#include "tools/converter/legacy_optimizer/graph/subgraph_node_pass.h"
#include "tools/converter/legacy_optimizer/graph/subgraph_tensor_pass.h"
#include "tools/converter/legacy_optimizer/graph/nested_loop_expand_pass.h"
  45. using std::string;
  46. namespace mindspore::lite {
  47. std::vector<schema::CNodeT *> GraphDefTransform::GetGraphNodes() {
  48. std::vector<schema::CNodeT *> old_nodes{};
  49. old_nodes.resize(graphDefT->nodes.size());
  50. std::transform(graphDefT->nodes.begin(), graphDefT->nodes.end(), old_nodes.begin(),
  51. [](const std::unique_ptr<schema::CNodeT> &node) { return node.get(); });
  52. return old_nodes;
  53. }
// Default construction/destruction: the class holds only graphDefT, a
// non-owning pointer installed via SetGraphDef, so the defaults suffice.
GraphDefTransform::GraphDefTransform() = default;
GraphDefTransform::~GraphDefTransform() = default;
// Sets the graph to transform. Ownership remains with the caller; the pointer
// must stay valid for the duration of any subsequent Transform() call.
void GraphDefTransform::SetGraphDef(schema::MetaGraphT *_dstDef) { graphDefT = _dstDef; }
  57. int GraphDefTransform::Transform(const converter::Flags &ctx) {
  58. STATUS status;
  59. {
  60. auto old_nodes = GetGraphNodes();
  61. Optimizer unusedOpRemoveOptimizer;
  62. unusedOpRemoveOptimizer.AddPass(new UnusedNodeRemovePass());
  63. if (!ctx.trainModel) {
  64. unusedOpRemoveOptimizer.AddPass(new DropoutNodeRemovePass());
  65. }
  66. unusedOpRemoveOptimizer.AddPass(new IsolatedNodeRemovePass());
  67. unusedOpRemoveOptimizer.AddPass(new SubgraphNodePass(old_nodes));
  68. status = unusedOpRemoveOptimizer.Run(graphDefT);
  69. if (status != RET_OK && status != RET_NO_CHANGE) {
  70. MS_LOG(ERROR) << "Run unusedOpRemoveOptimizer graphPasses Failed";
  71. return status;
  72. }
  73. }
  74. // generate and infer quant parameters
  75. {
  76. Optimizer inferQuantParamPass;
  77. inferQuantParamPass.AddPass(new (std::nothrow) TopologicalSortPass());
  78. inferQuantParamPass.AddPass(new (std::nothrow) InferQuantParamPass());
  79. status = inferQuantParamPass.Run(graphDefT);
  80. if (status != RET_OK && status != RET_NO_CHANGE) {
  81. MS_LOG(ERROR) << "Run topologicalOptimizer graphPasses Failed";
  82. return status;
  83. }
  84. }
  85. {
  86. // format transform
  87. // init old node indices
  88. auto old_nodes = GetGraphNodes();
  89. Optimizer formatTransOptimizer;
  90. auto formatTransPass = new (std::nothrow) FormatTransPass();
  91. if (formatTransPass == nullptr) {
  92. MS_LOG(ERROR) << "new formatTransPass failed";
  93. return RET_MEMORY_FAILED;
  94. }
  95. formatTransPass->SetQuantType(ctx.quantType);
  96. formatTransPass->SetFmk(ctx.fmk);
  97. formatTransOptimizer.AddPass(formatTransPass);
  98. formatTransOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes));
  99. formatTransOptimizer.AddPass(new (std::nothrow) TopologicalSortPass());
  100. if (ctx.fmk != converter::FmkType_TF) {
  101. formatTransOptimizer.AddPass(new (std::nothrow) InferShapePass());
  102. }
  103. status = formatTransOptimizer.Run(graphDefT);
  104. if (status != RET_OK && status != RET_NO_CHANGE && status != RET_INFER_INVALID) {
  105. MS_LOG(ERROR) << "Run formatTransOptimizer graphPasses Failed";
  106. return status;
  107. }
  108. }
  109. {
  110. // init old node indices
  111. auto old_nodes = GetGraphNodes();
  112. Optimizer formatTransOptimizer;
  113. formatTransOptimizer.AddPass(new (std::nothrow) FormatTransFusionPass());
  114. formatTransOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass());
  115. formatTransOptimizer.AddPass(new (std::nothrow) TransOpRemovePass());
  116. formatTransOptimizer.AddPass(new (std::nothrow) TransOpInsertPass());
  117. formatTransOptimizer.AddPass(new (std::nothrow) FormatTransFusionPass());
  118. formatTransOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass());
  119. formatTransOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes));
  120. status = formatTransOptimizer.Run(graphDefT);
  121. if (status != RET_OK && status != RET_NO_CHANGE && status != RET_INFER_INVALID) {
  122. MS_LOG(ERROR) << "Run formatTransOptimizer graphPasses Failed";
  123. return status;
  124. }
  125. }
  126. {
  127. // init old node indices
  128. auto old_nodes = GetGraphNodes();
  129. Optimizer formatTransOptimizer;
  130. if (!ctx.trainModel && ctx.fmk != converter::FmkType_ONNX) {
  131. formatTransOptimizer.AddPass(new (std::nothrow) GlobalFormatTransformPass());
  132. formatTransOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass());
  133. formatTransOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes));
  134. }
  135. status = formatTransOptimizer.Run(graphDefT);
  136. if (status != RET_OK && status != RET_NO_CHANGE && status != RET_INFER_INVALID) {
  137. MS_LOG(ERROR) << "Run formatTransOptimizer graphPasses Failed";
  138. return status;
  139. }
  140. }
  141. // postconvert pass
  142. {
  143. // init old node indices
  144. auto old_nodes = GetGraphNodes();
  145. Optimizer fusionOptimizer;
  146. if (!ctx.trainModel) {
  147. auto batch_norm_scale_pass = new (std::nothrow) BatchNormConvertScalePass();
  148. if (batch_norm_scale_pass == nullptr) {
  149. MS_LOG(ERROR) << "new batch_norm_scale_pass failed.";
  150. return RET_ERROR;
  151. }
  152. batch_norm_scale_pass->SetFmk(ctx.fmk);
  153. fusionOptimizer.AddPass(batch_norm_scale_pass);
  154. }
  155. fusionOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass());
  156. fusionOptimizer.AddPass(new SubgraphNodePass(old_nodes));
  157. status = fusionOptimizer.Run(graphDefT);
  158. if (status != RET_OK && status != RET_NO_CHANGE) {
  159. MS_LOG(ERROR) << "Run fusionOptimizer BatchNormConvertScalePass Failed";
  160. return status;
  161. }
  162. }
  163. {
  164. // init old node indices
  165. auto old_nodes = GetGraphNodes();
  166. Optimizer fusionOptimizer;
  167. fusionOptimizer.AddPass(new (std::nothrow) MulAddFusionPass());
  168. fusionOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass());
  169. fusionOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes));
  170. status = fusionOptimizer.Run(graphDefT);
  171. if (status != RET_OK && status != RET_NO_CHANGE) {
  172. MS_LOG(ERROR) << "Run fusionOptimizer graphPasses Failed";
  173. return status;
  174. }
  175. }
  176. // do quantization
  177. if (ctx.fmk != converter::FmkType_TF) {
  178. // init old node indices
  179. auto old_nodes = GetGraphNodes();
  180. Optimizer tensorQuantOptimizer;
  181. tensorQuantOptimizer.AddPass(new (std::nothrow) TopologicalSortPass());
  182. tensorQuantOptimizer.AddPass(new (std::nothrow) InferShapePass());
  183. tensorQuantOptimizer.AddPass(new (std::nothrow) TensorQuantPass());
  184. tensorQuantOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes));
  185. status = tensorQuantOptimizer.Run(graphDefT);
  186. if (status != RET_OK) {
  187. MS_LOG(ERROR) << "DoQuantize failed!";
  188. return status;
  189. }
  190. }
  191. // insert quantNode and deQuantNode
  192. if (ctx.fmk != converter::FmkType_TF) {
  193. // init old node indices
  194. auto old_nodes = GetGraphNodes();
  195. Optimizer quantNodeOptimizer;
  196. auto dTypeTransPass = new (std::nothrow) DTypeTransPass();
  197. if (dTypeTransPass == nullptr) {
  198. MS_LOG(ERROR) << "new dTypeTransPass failed";
  199. return RET_MEMORY_FAILED;
  200. }
  201. dTypeTransPass->SetInputDataDType(ctx.inputDataType);
  202. dTypeTransPass->SetOutputDataDType(ctx.outputDataType);
  203. quantNodeOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes));
  204. quantNodeOptimizer.AddPass(new (std::nothrow) TopologicalSortPass());
  205. quantNodeOptimizer.AddPass(new (std::nothrow) InferShapePass());
  206. status = quantNodeOptimizer.Run(graphDefT);
  207. if (status != RET_OK && status != RET_NO_CHANGE) {
  208. MS_LOG(ERROR) << "Run quantNodeOptimizer graphPasses Failed";
  209. return status;
  210. }
  211. auto old_nodes2 = GetGraphNodes();
  212. quantNodeOptimizer.AddPass(dTypeTransPass);
  213. quantNodeOptimizer.AddPass(new (std::nothrow) QuantCastFusionPass());
  214. quantNodeOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass());
  215. quantNodeOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes2));
  216. status = quantNodeOptimizer.Run(graphDefT);
  217. if (status != RET_OK && status != RET_NO_CHANGE) {
  218. MS_LOG(ERROR) << "Run quantNodeOptimizer graphPasses Failed";
  219. return status;
  220. }
  221. }
  222. // switch pass
  223. {
  224. // init old node indices
  225. auto old_nodes = GetGraphNodes();
  226. Optimizer switchOptimizer;
  227. switchOptimizer.AddPass(new (std::nothrow) SwitchPass());
  228. switchOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass());
  229. switchOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes));
  230. status = switchOptimizer.Run(graphDefT);
  231. if (status != RET_OK && status != RET_NO_CHANGE) {
  232. MS_LOG(ERROR) << "Run switch graphPasses Failed";
  233. return status;
  234. }
  235. }
  236. // subgraph tensor pass
  237. {
  238. Optimizer subgraphTensorOptimizer;
  239. subgraphTensorOptimizer.AddPass(new (std::nothrow) SubgraphTensorPass());
  240. status = subgraphTensorOptimizer.Run(graphDefT);
  241. if (status != RET_OK && status != RET_NO_CHANGE) {
  242. MS_LOG(ERROR) << "Run subgraph tensor pass Failed";
  243. return status;
  244. }
  245. }
  246. // tensor name
  247. {
  248. // init old node indices
  249. auto old_nodes = GetGraphNodes();
  250. Optimizer nameOptimizer;
  251. nameOptimizer.AddPass(new (std::nothrow) SubgraphNodePass(old_nodes));
  252. nameOptimizer.AddPass(new (std::nothrow) TopologicalSortPass());
  253. nameOptimizer.AddPass(new (std::nothrow) TensorNamePass());
  254. status = nameOptimizer.Run(graphDefT);
  255. if (status != RET_OK && status != RET_NO_CHANGE) {
  256. MS_LOG(ERROR) << "Run nameOptimizer graphPasses Failed";
  257. return status;
  258. }
  259. }
  260. {
  261. Optimizer nestedLoopOptimizer;
  262. nestedLoopOptimizer.AddPass(new (std::nothrow) NestedLoopExpandPass());
  263. status = nestedLoopOptimizer.Run(graphDefT);
  264. if (status != RET_OK && status != RET_NO_CHANGE) {
  265. MS_LOG(ERROR) << "Run nestedLoopOptimizer graphPasses Failed";
  266. return status;
  267. }
  268. }
  269. {
  270. Optimizer quantNodeOptimizer;
  271. quantNodeOptimizer.AddPass(new (std::nothrow) SetUnusedQuantParamToDefaultPass());
  272. status = quantNodeOptimizer.Run(graphDefT);
  273. if (status != RET_OK && status != RET_NO_CHANGE) {
  274. MS_LOG(ERROR) << "Run quantNodeOptimizer graphPasses Failed";
  275. return status;
  276. }
  277. }
  278. return RET_OK;
  279. } // namespace mindspore::lite
  280. } // namespace mindspore::lite