
test_tensorflow_parser.cc (166 kB)

  1. /**
  2. * Copyright 2019-2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include <gtest/gtest.h>
  17. #define protected public
  18. #define private public
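  // The protected/private redefinitions above expose parser internals to the headers included below, enabling white-box testing.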
  19. #include "parser/common/op_parser_factory.h"
  20. #include "parser/tensorflow/tensorflow_parser.h"
  21. #include "graph/operator_reg.h"
  22. #include "register/op_registry.h"
  23. #include "external/register/register.h"
  24. #include "parser/common/register_tbe.h"
  25. #include "st/parser_st_utils.h"
  26. #include "tests/depends/ops_stub/ops_stub.h"
  27. #include "parser/common/acl_graph_parser_util.h"
  28. #include "metadef/third_party/graphengine/inc/external/ge/ge_api_types.h"
  29. #include "omg/parser/parser_factory.h"
  30. #include "common/pre_checker.h"
  31. #include "common/util.h"
  32. #include "external/parser/tensorflow_parser.h"
  33. #include "parser/tensorflow/tensorflow_constant_parser.h"
  34. #include "common/types.h"
  35. #include "parser/common/op_def/variable_op.h"
  36. #include "parser/tensorflow/tensorflow_ref_switch_parser.h"
  37. #include "parser/tensorflow/tensorflow_fusion_op_parser.h"
  38. #include "parser/tensorflow/tensorflow_auto_mapping_parser_adapter.h"
  39. #include "parser/common/op_def/arg_op.h"
  40. #include "parser/tensorflow/tensorflow_fusion_custom_parser_adapter.h"
  41. #include "parser/tensorflow/tensorflow_reshape_parser.h"
  42. #include "parser/tensorflow/tensorflow_custom_parser_adapter.h"
  43. #include "parser/tensorflow/tensorflow_squeeze_parser.h"
  44. #include "parser/tensorflow/graph_functiondef.h"
  45. #include "parser/tensorflow/graph_optimizer.h"
  46. #include "cce/dnn_base_def.hpp"
  47. #include "parser/tensorflow/scope/scope_pass_manager.h"
  48. #include "parser/tensorflow/tensorflow_util.h"
  49. #include "compute_graph_impl.h"
  50. #include "parser/tensorflow/tensorflow_enter_parser.h"
  51. #include "parser/common/op_def/ir_pb_converter.h"
  52. #include "parser/common/tuple.h"
  53. #include "common/op_def/frameworkop_op.h"
  54. #include "common/op_def/shape_n_op.h"
  55. #include "common/op_def/var_is_initialized_op_op.h"
  56. #include "common/op_def/fill_op.h"
  57. #include "common/convert/pb2json.h"
  58. #include "common/convert/message2operator.h"
  59. #include "parser/common/proto_file_parser.h"
  60. #include "parser/common/pre_checker.h"
  61. #include "parser/common/tbe_plugin_loader.h"
  62. #include "parser/common/data_op_parser.h"
  63. #include "parser/common/model_saver.h"
  64. #include "framework/omg/parser/parser_api.h"
  65. #include "framework/omg/parser/parser_factory.h"
  66. #include "parser/common/parser_fp16_t.h"
  67. #include "parser/common/op_parser_factory.h"
  68. #include "parser/common/prototype_pass_manager.h"
  69. #include "parser/common/register_tbe.h"
  70. #include "parser/common/pass_manager.h"
  71. #include "parser/tensorflow/graph_optimizer.h"
  72. #include "metadef/inc/register/scope/scope_pass_registry_impl.h"
  73. #include "register/scope/scope_fusion_pass_register.h"
  74. #undef protected
  75. #undef private
  76. using namespace std;
  77. using namespace domi::tensorflow;
  78. using namespace domi;
  79. using namespace cce;
  80. using namespace testing;
  81. using namespace std;
  82. using namespace google::protobuf;
  83. static const string GRAPH_DEFAULT_NAME = "default";
  84. namespace ge {
  85. class STestTensorflowParser : public testing::Test {
  86. protected:
  87. void SetUp() {
  88. ParerSTestsUtils::ClearParserInnerCtx();
  89. }
  90. void TearDown() {}
  91. public:
  92. void RegisterCustomOp();
  93. };
  94. class TestOperator : public ParserOperator
  95. {
  96. public:
  97. TestOperator()
  98. : ParserOperator("test")
  99. {
  100. }
  101. ~TestOperator()
  102. {
  103. }
  104. };
  105. class ErrorGraphPass: public GraphPass
  106. {
  107. Status Run(ComputeGraphPtr graph)
  108. {
  109. return domi::FAILED;
  110. }
  111. };
  112. class ScopeTestPass : public ScopeBasePass {
  113. protected:
  114. vector<ScopeFusionPatterns> DefinePatterns() {
  115. vector<ScopeFusionPatterns> patterns_list;
  116. return patterns_list;
  117. };
  118. string PassName() {
  119. return "test";
  120. };
  121. Status LastMatchScopesAndOPs(shared_ptr<ScopeGraph> &scope_graph, vector<ScopesResult> &results) {
  122. return domi::SUCCESS;
  123. };
  124. void GenerateFusionResult(const vector<Scope *> &scopes, FusionScopesResult *fusion_rlt) {
  125. return;
  126. };
  127. };
  128. static Status ParseParams(const google::protobuf::Message* op_src, ge::Operator& op_dest) {
  129. return SUCCESS;
  130. }
  131. static Status ParseParamByOpFunc(const ge::Operator &op_src, ge::Operator& op_dest) {
  132. return SUCCESS;
  133. }
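  // RegisterCustomOp maps the TensorFlow "Add" op to the ParseParams stub above, then finalizes and registers the pending registration data with the op registry.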
  134. void STestTensorflowParser::RegisterCustomOp() {
  135. REGISTER_CUSTOM_OP("Add")
  136. .FrameworkType(domi::TENSORFLOW)
  137. .OriginOpType("Add")
  138. .ParseParamsFn(ParseParams);
  139. std::vector<OpRegistrationData> reg_datas = domi::OpRegistry::Instance()->registrationDatas;
  140. for (auto reg_data : reg_datas) {
  141. domi::OpRegTbeParserFactory::Instance()->Finalize(reg_data);
  142. domi::OpRegistry::Instance()->Register(reg_data);
  143. }
  144. domi::OpRegistry::Instance()->registrationDatas.clear();
  145. }
  146. void AddDumpOriginName(const ge::NodePtr parent_node, const std::string& subgraph_name, ge::ComputeGraphPtr graph);
  147. namespace {
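  // Fixture helpers: AddNode appends a NodeDef of the given type/name to the GraphDef and stores a serialized (empty) OpDef in its "op_def" attribute.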
  148. NodeDef* AddNode(GraphDef& graph, string type, string name) {
  149. NodeDef* nodeDef = graph.add_node();
  150. nodeDef->set_op(type);
  151. nodeDef->set_name(name);
  152. tensorflow::OpDef op_def;
  153. string op_def_string;
  154. op_def.SerializeToString(&op_def_string);
  155. tensorflow::AttrValue value;
  156. value.set_s(op_def_string);
  157. nodeDef->mutable_attr()->insert({"op_def", value});
  158. return nodeDef;
  159. }
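  // AddInput links src to dst: srcIndex == -1 adds a control edge ("^name"); otherwise it adds a data edge and attaches serialized input/output tensor descriptors to dst and src.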
  160. void AddInput(NodeDef* src, NodeDef* dst, int srcIndex) {
  161. if(srcIndex == -1){
  162. dst->add_input("^"+src->name());
  163. } else {
  164. if (srcIndex == 0) {
  165. dst->add_input(src->name());
  166. } else {
  167. dst->add_input(src->name() + ":" + std::to_string(srcIndex));
  168. }
  169. {
  170. auto input = (*dst->mutable_attr())[ge::ATTR_NAME_INPUT_TENSOR_DESC].mutable_list()->add_func();
  171. tensorflow::AttrValue val1;
  172. val1.set_i(0);
  173. (*input->mutable_attr())["serialize_format"] = val1;
  174. tensorflow::AttrValue val2;
  175. val2.set_i(tensorflow::DT_FLOAT);
  176. (*input->mutable_attr())["serialize_datatype"] = val2;
  177. tensorflow::AttrValue val3;
  178. val3.mutable_list()->add_i(10);
  179. (*input->mutable_attr())["serialize_shape"] = val3;
  180. }
  181. {
  182. auto output = (*src->mutable_attr())[ge::ATTR_NAME_OUTPUT_TENSOR_DESC].mutable_list()->add_func();
  183. tensorflow::AttrValue val1;
  184. val1.set_i(0);
  185. (*output->mutable_attr())["serialize_format"] = val1;
  186. tensorflow::AttrValue val2;
  187. val2.set_i(tensorflow::DT_FLOAT);
  188. (*output->mutable_attr())["serialize_datatype"] = val2;
  189. tensorflow::AttrValue val3;
  190. val3.mutable_list()->add_i(10);
  191. (*output->mutable_attr())["serialize_shape"] = val3;
  192. }
  193. }
  194. }
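  // initNodeDef returns a heap-allocated Const NodeDef with T/dtype set to DT_INT32, an output-op list, and a 4x6 tensor payload in its value attribute; the caller owns the pointer.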
  195. NodeDef *initNodeDef() {
  196. NodeDef * nodeDef = new NodeDef();
  197. nodeDef->set_op("Const");
  198. ::google::protobuf::Map<std::string, tensorflow::AttrValue >* node_attr_map = nodeDef->mutable_attr();
  199. // Set the T attribute
  200. domi::tensorflow::AttrValue t_attr_value;
  201. t_attr_value.set_type(domi::tensorflow::DT_INT32);
  202. (*node_attr_map)[TENSORFLOW_ATTR_T] = t_attr_value;
  203. domi::tensorflow::AttrValue dtype_attr_value;
  204. dtype_attr_value.set_type(domi::tensorflow::DT_INT32);
  205. (*node_attr_map)[TENSORFLOW_ATTR_DTYPE] = dtype_attr_value;
  206. // Set the output op attribute
  207. domi::tensorflow::AttrValue outputs_attr_value;
  208. ::tensorflow::AttrValue_ListValue* list = outputs_attr_value.mutable_list();
  209. list->add_s("MatMul");
  210. (*node_attr_map)[TENSORFLOW_ATTR_OUTPUT_OP] = outputs_attr_value;
  211. // Set the tensor value attribute
  212. domi::tensorflow::AttrValue value_attr_value;
  213. tensorflow::TensorProto* tensor = value_attr_value.mutable_tensor();
  214. tensorflow::TensorShapeProto* tensor_shape = tensor->mutable_tensor_shape();
  215. tensor_shape->clear_dim();
  216. tensor_shape->add_dim()->set_size(4);
  217. tensor_shape->add_dim()->set_size(6);
  218. tensor->set_dtype(domi::tensorflow::DT_INT32);
  219. float *addr = new float[24];
  220. for (int32_t i = 0; i < 24; i++) {
  221. *(addr + i) = 1.0 + i;
  222. }
  223. tensor->set_tensor_content((void *)addr, 24 * sizeof(float));
  224. (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
  225. delete[] addr;
  226. return nodeDef;
  227. }
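  // initOpNodeDef_VariableV2 builds a VariableV2 NodeDef carrying format, dtype, container, shared_name, shape, data_format and padding attributes plus one serialized output tensor descriptor.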
  228. NodeDef * initOpNodeDef_VariableV2() {
  229. NodeDef * nodeDef = new NodeDef();
  230. nodeDef->set_op("VariableV2");
  231. google::protobuf::Map<std::string, tensorflow::AttrValue > *node_attr_map = nodeDef->mutable_attr();
  232. // Set the data_format attribute
  233. domi::tensorflow::AttrValue format_attr_value;
  234. format_attr_value.set_s("_FZ");
  235. (*node_attr_map)[VAR_ATTR_FORMAT] = format_attr_value;
  236. domi::tensorflow::AttrValue type_attr;
  237. type_attr.set_type(domi::tensorflow::DT_FLOAT);
  238. (*node_attr_map)[VAR_ATTR_DTYPE] = type_attr;
  239. domi::tensorflow::AttrValue container_attr_value;
  240. container_attr_value.set_s("container");
  241. (*node_attr_map)[VAR_ATTR_CONTAINER] = container_attr_value;
  242. domi::tensorflow::AttrValue shard_name_attr_value;
  243. shard_name_attr_value.set_s("shard_name");
  244. (*node_attr_map)[VAR_ATTR_SHARED_NAME] = shard_name_attr_value;
  245. domi::tensorflow::AttrValue shape_attr_value;
  246. shape_attr_value.mutable_shape()->add_dim()->set_size(1);
  247. shape_attr_value.mutable_shape()->add_dim()->set_size(2);
  248. shape_attr_value.mutable_shape()->add_dim()->set_size(3);
  249. shape_attr_value.mutable_shape()->add_dim()->set_size(4);
  250. (*node_attr_map)[ge::VAR_ATTR_SHAPE] = shape_attr_value;
  251. domi::tensorflow::AttrValue shape;
  252. shape.mutable_list()->add_i((int64)32);
  253. shape.mutable_list()->add_i((int64)32);
  254. shape.mutable_list()->add_i((int64)14);
  255. shape.mutable_list()->add_i((int64)14);
  256. // Set the data_format attribute
  257. domi::tensorflow::AttrValue df_attr_value;
  258. domi::tensorflow::AttrValue df_attr_value2;
  259. df_attr_value2.set_s(TENSORFLOWF_TENSOR_NHWC);
  260. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  261. (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value2;
  262. // Set the padding attribute
  263. domi::tensorflow::AttrValue pad_attr_value;
  264. domi::tensorflow::AttrValue pad_attr_value2;
  265. pad_attr_value2.set_s(TENSORFLOWF_OP_PADDING_SAME);
  266. (*node_attr_map)[TENSORFLOW_ATTR_PADDING] = pad_attr_value2;
  267. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  268. domi::tensorflow::NameAttrList name_attr_list;
  269. name_attr_list.set_name(std::to_string(0));
  270. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  271. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  272. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  273. domi::tensorflow::AttrValue output_tensor_descs;
  274. *(output_tensor_descs.mutable_list()->add_func()) = name_attr_list;
  275. nodeDef->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, output_tensor_descs});
  276. return nodeDef;
  277. }
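  // initOpNodeDef_TemporaryVariable is the TemporaryVariable counterpart: dtype, var_name and shape attributes, data_format/padding, and one serialized output tensor descriptor.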
  278. NodeDef *initOpNodeDef_TemporaryVariable() {
  279. NodeDef * nodeDef = new NodeDef();
  280. nodeDef->set_op("TemporaryVariable");
  281. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();
  282. // Set the dtype attribute
  283. domi::tensorflow::AttrValue type_attr;
  284. type_attr.set_type(domi::tensorflow::DT_FLOAT);
  285. (*node_attr_map)[VAR_ATTR_DTYPE] = type_attr;
  286. // Set the var_name attribute
  287. domi::tensorflow::AttrValue var_name_attr_value;
  288. var_name_attr_value.set_s("temporary_variable_name");
  289. (*node_attr_map)[ge::VAR_ATTR_NAME] = var_name_attr_value;
  290. // Set the shape attribute
  291. domi::tensorflow::AttrValue shape_attr_value;
  292. shape_attr_value.mutable_shape()->add_dim()->set_size(1);
  293. shape_attr_value.mutable_shape()->add_dim()->set_size(2);
  294. shape_attr_value.mutable_shape()->add_dim()->set_size(3);
  295. shape_attr_value.mutable_shape()->add_dim()->set_size(4);
  296. (*node_attr_map)[ge::VAR_ATTR_SHAPE] = shape_attr_value;
  297. domi::tensorflow::AttrValue shape;
  298. shape.mutable_list()->add_i((int64)32);
  299. shape.mutable_list()->add_i((int64)32);
  300. shape.mutable_list()->add_i((int64)14);
  301. shape.mutable_list()->add_i((int64)14);
  302. // Set the data_format attribute
  303. domi::tensorflow::AttrValue df_attr_value2;
  304. df_attr_value2.set_s(TENSORFLOWF_TENSOR_NHWC);
  305. (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value2;
  306. domi::tensorflow::AttrValue df_attr_value;
  307. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  308. // Set the padding attribute
  309. domi::tensorflow::AttrValue pad_attr_value2;
  310. pad_attr_value2.set_s(TENSORFLOWF_OP_PADDING_SAME);
  311. (*node_attr_map)[TENSORFLOW_ATTR_PADDING] = pad_attr_value2;
  312. domi::tensorflow::AttrValue pad_attr_value;
  313. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  314. domi::tensorflow::NameAttrList name_attr_list;
  315. name_attr_list.set_name(std::to_string(0));
  316. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  317. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  318. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  319. domi::tensorflow::AttrValue output_tensor_descs;
  320. *(output_tensor_descs.mutable_list()->add_func()) = name_attr_list;
  321. nodeDef->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, output_tensor_descs});
  322. return nodeDef;
  323. }
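  // fusioninitNodeDef builds a NodeDef holding a single-element value tensor whose dtype depends on index: 0 -> float, 1 -> int32, 2 -> half; data_format is NCHW.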
  324. NodeDef *fusioninitNodeDef(int index) {
  325. NodeDef *nodeDef = new NodeDef();
  326. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();
  327. // Set the dtype attribute
  328. domi::tensorflow::AttrValue dtype_attr_value ;
  329. if (index == 0) {
  330. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  331. } else if (index == 1) {
  332. dtype_attr_value.set_type(domi::tensorflow::DT_INT32);
  333. } else if (index == 2) {
  334. dtype_attr_value.set_type(tensorflow::DT_HALF);
  335. }
  336. (*node_attr_map)[ge::TENSORFLOW_ATTR_DTYPE] = dtype_attr_value;
  337. // Set the data_format attribute
  338. domi::tensorflow::AttrValue df_attr_value;
  339. df_attr_value.set_s(TENSORFLOWF_TENSOR_NCHW);
  340. (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value;
  341. // Set the tensor value attribute
  342. domi::tensorflow::AttrValue value_attr_value;
  343. ::tensorflow::TensorProto* tensor = value_attr_value.mutable_tensor();
  344. ::tensorflow::TensorShapeProto* tensor_shape = tensor->mutable_tensor_shape();
  345. tensor_shape->clear_dim();
  346. ::tensorflow::TensorShapeProto_Dim* dim = tensor_shape->add_dim();
  347. dim->set_name("tensor dim");
  348. dim->set_size(1);
  349. if (index == 0) {
  350. tensor->set_dtype(domi::tensorflow::DT_FLOAT);
  351. float *addr = new float[1];
  352. *addr = 1.0;
  353. tensor->set_tensor_content((void *)addr, sizeof(float));
  354. (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
  355. delete[] addr;
  356. } else if (index == 1) {
  357. tensor->set_dtype(domi::tensorflow::DT_INT32);
  358. int32_t *addr = new int32_t[1];
  359. *addr = 1;
  360. tensor->set_tensor_content((void *)addr, sizeof(int32_t));
  361. (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
  362. delete[] addr;
  363. } else if (index == 2) {
  364. tensor->set_dtype(tensorflow::DT_HALF);
  365. tensor->add_half_val(1);
  366. (*node_attr_map)[TENSORFLOW_ATTR_VALUE] = value_attr_value;
  367. }
  368. return nodeDef;
  369. }
  370. NodeDef *MallocNodeDef(const string &name, const string &type) {
  371. NodeDef* node_def = new (std::nothrow) NodeDef();
  372. if (node_def != nullptr) {
  373. node_def->set_name(name);
  374. node_def->set_op(type);
  375. }
  376. return node_def;
  377. }
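  // GenOriginNodeDef fills the parser's nodedef_map_ with two Const producers (pre_node_a, pre_node_ctrl_in) and three Identity consumers (post_node_b/c/d); FreeNodeDefMap releases them.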
  378. void GenOriginNodeDef(ge::TensorFlowModelParser *tensorflow_parser, vector<string> &node_name_list) {
  379. NodeDef* pre_node_a = MallocNodeDef("pre_node_a", "Const");
  380. EXPECT_NE(pre_node_a, nullptr);
  381. {
  382. google::protobuf::Map< ::std::string, ::tensorflow::AttrValue >* node_attr_map = pre_node_a->mutable_attr();
  383. tensorflow::AttrValue attr_dtype;
  384. attr_dtype.set_type(tensorflow::DT_FLOAT);
  385. (*node_attr_map)["dtype"] = attr_dtype;
  386. tensorflow::AttrValue attr_value;
  387. tensorflow::TensorProto* tensor = attr_value.mutable_tensor();
  388. tensor->add_bool_val(true);
  389. tensor->set_dtype(tensorflow::DT_BOOL);
  390. (*node_attr_map)["value"] = attr_value;
  391. }
  392. tensorflow_parser->nodedef_map_["pre_node_a"] = pre_node_a;
  393. node_name_list.push_back("pre_node_a");
  394. NodeDef* pre_node_ctrl_in = MallocNodeDef("pre_node_ctrl_in", "Const");
  395. EXPECT_NE(pre_node_ctrl_in, nullptr);
  396. {
  397. ::google::protobuf::Map< ::std::string, ::tensorflow::AttrValue >* node_attr_map = pre_node_ctrl_in->mutable_attr();
  398. tensorflow::AttrValue attr_dtype;
  399. attr_dtype.set_type(tensorflow::DT_FLOAT);
  400. (*node_attr_map)["dtype"] = attr_dtype;
  401. tensorflow::AttrValue attr_value;
  402. tensorflow::TensorProto* tensor = attr_value.mutable_tensor();
  403. tensor->add_bool_val(true);
  404. tensor->set_dtype(tensorflow::DT_BOOL);
  405. (*node_attr_map)["value"] = attr_value;
  406. }
  407. tensorflow_parser->nodedef_map_["pre_node_ctrl_in"] = pre_node_ctrl_in;
  408. node_name_list.push_back("pre_node_ctrl_in");
  409. NodeDef* post_node_b = MallocNodeDef("post_node_b", "Identity");
  410. EXPECT_NE(post_node_b, nullptr);
  411. tensorflow_parser->nodedef_map_["post_node_b"] = post_node_b;
  412. node_name_list.push_back("post_node_b");
  413. NodeDef* post_node_c = MallocNodeDef("post_node_c", "Identity");
  414. EXPECT_NE(post_node_c, nullptr);
  415. tensorflow_parser->nodedef_map_["post_node_c"] = post_node_c;
  416. node_name_list.push_back("post_node_c");
  417. NodeDef* post_node_d = MallocNodeDef("post_node_d", "Identity");
  418. EXPECT_NE(post_node_d, nullptr);
  419. tensorflow_parser->nodedef_map_["post_node_d"] = post_node_d;
  420. node_name_list.push_back("post_node_d");
  421. }
  422. void FreeNodeDefMap(ge::TensorFlowModelParser *tensorflow_parser, set<string> &malloc_node_name_list) {
  423. for (auto &item : tensorflow_parser->nodedef_map_) {
  424. if (item.second != nullptr && malloc_node_name_list.count(item.first) > 0) {
  425. delete (item.second);
  426. item.second = nullptr;
  427. }
  428. }
  429. }
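  // GenFusionScopesResult builds a scope-to-multi-nodes fusion result: one scope input, two scope outputs, and three chained inner nodes (Unique -> Identity -> Identity) registered on the scope graph.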
  430. void GenFusionScopesResult(shared_ptr<ScopeGraph> &scope_graph, FusionScopesResult *fusion_rlt,
  431. const string &fusion_op_name) {
  432. if (fusion_rlt == nullptr) {
  433. return;
  434. }
  435. fusion_rlt->InsertInputs("scope_node_1", {0}); // scope input 0
  436. fusion_rlt->InsertOutputs("scope_node_m", {0}); // scope output 0
  437. fusion_rlt->InsertOutputs("scope_node_n", {1}); // scope output 1
  438. fusion_rlt->SetType(ge::kScopeToMultiNodes);
  439. fusion_rlt->SetName(fusion_op_name);
  440. fusion_rlt->SetDescription("Description for fusion node");
  441. // Add inner nodes in sequence.
  442. auto node1 = fusion_rlt->AddInnerNode("inner_node_1", "Unique"); // add inner node1
  443. CHECK_INNER_NODE_CONDITION(node1 != nullptr, fusion_rlt);
  444. auto ret = node1
  445. ->InsertInput(ge::kInputFromFusionScope, 0) // Input from 0th of boundary (a)
  446. .InsertOutput(ge::kOutputToFusionScope, 0) // Output to 0th of boundary (b)
  447. .InsertOutput("inner_node_2", 0) // Output to input 0th of internal node 2
  448. .BuildInnerNode(); // Construct an internal Operator
  449. CHECK_INNER_NODE_CONDITION(ret == ge::GRAPH_SUCCESS, fusion_rlt);
  450. string str_val = "This is a string.";
  451. node1->MutableOperator()->SetAttr("key1", 2); // Set integer attribute
  452. node1->MutableOperator()->SetAttr("key2", str_val); // Set the string attribute
  453. node1->MutableOperator()->SetAttr("key3", true); // Set boolean attribute
  454. auto node2 = fusion_rlt->AddInnerNode("inner_node_2", "Identity"); // add inner node2
  455. CHECK_INNER_NODE_CONDITION(node2 != nullptr, fusion_rlt);
  456. ret = node2
  457. ->InsertInput("inner_node_1", 1) // The input comes from the 1st output of internal node 1
  458. .InsertOutput("inner_node_3", 0) // Output to input 0th of internal node 3
  459. .BuildInnerNode();
  460. CHECK_INNER_NODE_CONDITION(ret == ge::GRAPH_SUCCESS, fusion_rlt);
  461. node2->SetInputFormat("x", "NHWC");
  462. node2->SetOutputFormat("y", "NHWC");
  463. auto node3 = fusion_rlt->AddInnerNode("inner_node_3", "Identity"); // add inner node3
  464. CHECK_INNER_NODE_CONDITION(node3 != nullptr, fusion_rlt);
  465. ret = node3
  466. ->InsertInput("inner_node_2", 0) // The input comes from the 0th output of internal node 2
  467. .InsertOutput(ge::kOutputToFusionScope, 1) // Output to 1st of boundary (c)
  468. .BuildInnerNode();
  469. CHECK_INNER_NODE_CONDITION(ret == ge::GRAPH_SUCCESS, fusion_rlt);
  470. scope_graph->impl_->AddFusionScopesResult(fusion_rlt);
  471. }
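  // GenOriginContext populates op_node_context_map_ for the fusion op and its neighbours ({-1, -1} entries denote control edges) and records the fusion op type and description.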
  472. void GenOriginContext(ge::TensorFlowModelParser *tensorflow_parser, const string &fusion_op_name) {
  473. // op_node_context for fusion op
  474. ge::OpNodeContext op_node_context;
  475. op_node_context.input_map["pre_node_a"].push_back({0, 0});
  476. op_node_context.input_map["pre_node_ctrl_in"].push_back({-1, -1}); // ctrl edges
  477. op_node_context.output_map["post_node_b"].push_back({0, 0});
  478. op_node_context.output_map["post_node_c"].push_back({1, 0});
  479. op_node_context.output_map["post_node_d"].push_back({-1, -1});
  480. op_node_context.output_map["_Retval"].push_back({0, 1});
  481. // ctrl edges
  482. tensorflow_parser->op_node_context_map_[fusion_op_name] = op_node_context;
  483. tensorflow_parser->SaveEdgesControlInfo(fusion_op_name, -1);
  484. // op_node_context for pre_node_a
  485. ge::OpNodeContext op_node_context_a;
  486. op_node_context_a.output_map[fusion_op_name].push_back({0, 0});
  487. tensorflow_parser->op_node_context_map_["pre_node_a"] = op_node_context_a;
  488. // op_node_context for pre_node_ctrl_in
  489. ge::OpNodeContext op_node_context_ctrl_in;
  490. op_node_context_ctrl_in.output_map[fusion_op_name].push_back({-1, -1}); // ctrl edges
  491. tensorflow_parser->op_node_context_map_["pre_node_ctrl_in"] = op_node_context_ctrl_in;
  492. // op_node_context for post_node_b
  493. ge::OpNodeContext op_node_context_b;
  494. op_node_context_b.input_map[fusion_op_name].push_back({0, 0});
  495. tensorflow_parser->op_node_context_map_["post_node_b"] = op_node_context_b;
  496. // op_node_context for post_node_c
  497. ge::OpNodeContext op_node_context_c;
  498. op_node_context_c.output_map["post_node_d"].push_back({0, 0});
  499. tensorflow_parser->op_node_context_map_["post_node_c"] = op_node_context_c;
  500. // op_node_context for post_node_d
  501. ge::OpNodeContext op_node_context_d;
  502. op_node_context_d.input_map[fusion_op_name].push_back({-1, -1}); // ctrl edges
  503. tensorflow_parser->op_node_context_map_["post_node_d"] = op_node_context_d;
  504. // op_node_context for Retval
  505. ge::OpNodeContext op_node_context_Retval;
  506. op_node_context_d.input_map["post_node_d"].push_back({-1, -1});
  507. op_node_context_c.output_map["fusion_op_name"].push_back({0,1});
  508. tensorflow_parser->op_node_context_map_["_Retval"] = op_node_context_Retval;
  509. tensorflow_parser->SaveEdgesControlInfo("op_node_context_Retval", -1);
  510. string fusion_op_type = ge::kScopeToMultiNodes;
  511. string description = "fusion op description";
  512. tensorflow_parser->fusion_op_type_map_[fusion_op_name].push_back(fusion_op_type);
  513. tensorflow_parser->fusion_op_type_map_[fusion_op_name].push_back(description);
  514. }
  515. void register_tbe_op() {
  516. std::vector<OpRegistrationData> registrationDatas = OpRegistry::Instance()->registrationDatas;
  517. for (OpRegistrationData reg_data : registrationDatas) {
  518. domi::OpRegTbeParserFactory::Instance()->Finalize(reg_data);
  519. OpRegistry::Instance()->Register(reg_data);
  520. }
  521. OpRegistry::Instance()->registrationDatas.clear();
  522. }
  523. NodeDef *initNodeDef_axis_dims() {
  524. NodeDef *nodeDef = new NodeDef();
  525. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef->mutable_attr();
  526. // Set the T attribute
  527. domi::tensorflow::AttrValue dtype_attr_value ;
  528. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  529. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
  530. // Set the axis and dims attributes
  531. domi::tensorflow::AttrValue axis_attr_value;
  532. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  533. list->add_i(1);
  534. list->add_i(2);
  535. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  536. (*node_attr_map)[ge::SQUEEZE_ATTR_DIMS] = axis_attr_value;
  537. return nodeDef;
  538. }
  539. NodeDef *initNodeDef_dims() {
  540. NodeDef *nodeDef = new NodeDef();
  541. ::google::protobuf::Map<std::string, tensorflow::AttrValue > *node_attr_map = nodeDef->mutable_attr();
  542. // Set the T attribute
  543. domi::tensorflow::AttrValue dtype_attr_value ;
  544. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  545. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
  546. // Set the dims attribute
  547. domi::tensorflow::AttrValue axis_attr_value;
  548. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  549. list->add_i(1);
  550. list->add_i(2);
  551. (*node_attr_map)[ge::SQUEEZE_ATTR_DIMS] = axis_attr_value;
  552. return nodeDef;
  553. }
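  // CreateOpDef serializes a tensorflow::OpDef and NodeDef for the given name/type and stores them on the GE OpDesc as the "op_def" and "node_def" attributes consumed by the parser.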
  554. void CreateOpDef(const string& _name, const string& _type, ge::OpDescPtr opDef) {
  555. tensorflow::OpDef tsOpDef;
  556. tsOpDef.set_name(_name);
  557. tensorflow::OpDef_ArgDef* outArgDef = tsOpDef.add_output_arg();
  558. outArgDef->set_name(_name);
  559. outArgDef->set_description("outArgDef");
  560. outArgDef->set_type((tensorflow::DataType)3);
  561. if ((_name == "A") || (_name == "B")) {
  562. tensorflow::OpDef_ArgDef* argDef1 = tsOpDef.add_output_arg();
  563. string name = _name+"t";
  564. argDef1->set_name(name);
  565. argDef1->set_description("this is a test 2");
  566. argDef1->set_type((tensorflow::DataType)3);
  567. }
  568. if ((_name == "C") ) {
  569. outArgDef->set_number_attr("num");
  570. }
  571. if ((_name == "D") ) {
  572. outArgDef->set_type_list_attr("type_list");
  573. }
  574. string strTsOpDef;
  575. tsOpDef.SerializeToString(&strTsOpDef);
  576. ge::AttrUtils::SetStr(opDef, "op_def", strTsOpDef);
  577. tensorflow::NodeDef nodedef;
  578. nodedef.set_name(_name);
  579. nodedef.set_op(_name);
  580. string name("op_def");
  581. tensorflow::AttrValue value;
  582. value.set_s(strTsOpDef);
  583. TensorFlowUtil::AddNodeAttr(name, value, &nodedef);
  584. value.set_i(1);
  585. TensorFlowUtil::AddNodeAttr("num", value, &nodedef);
  586. value.mutable_list();
  587. TensorFlowUtil::AddNodeAttr("type_list", value, &nodedef);
  588. string strNodeDef;
  589. nodedef.SerializeToString(&strNodeDef);
  590. ge::GeAttrValue::BYTES nodedefBytes;
  591. nodedefBytes = ge::GeAttrValue::BYTES::CopyFrom((uint8_t*)strNodeDef.data(), strNodeDef.length());
  592. ge::AttrUtils::SetBytes(opDef, "node_def", nodedefBytes);
  593. if ((_name== "S") || (_name == "K")) {
  594. int index = 0;
  595. ge::AttrUtils::SetInt(opDef, "T", 1);
  596. ge::AttrUtils::SetInt(opDef, "arg_index", index);
  597. ge::AttrUtils::SetInt(opDef, "ret_index", index);
  598. }
  599. }
  600. ge::NodePtr AddNode(ge::ComputeGraphPtr graph, const string& _name, const string& _type,int32_t i_n, int32_t o_n) {
  601. ge::OpDescPtr opDef = std::make_shared<ge::OpDesc>();
  602. opDef->SetName(_name);
  603. opDef->SetType(_type);
  604. for(int32_t i = 0; i < i_n; i++) {
  605. ge::GeTensorDesc input;
  606. input.SetDataType((ge::DataType)1);
  607. opDef->AddInputDesc(input);
  608. }
  609. for(int32_t i = 0;i < o_n; i++) {
  610. ge::GeTensorDesc output;
  611. output.SetDataType((ge::DataType)1);
  612. opDef->AddOutputDesc(output);
  613. }
  614. CreateOpDef(_name, _type, opDef);
  615. return graph->AddNode(opDef);
  616. }
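  // MakeDagGraph builds a small DAG from DATA node S through test ops A..J to NETOUTPUT node K, including one control edge from H to J.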
  617. void MakeDagGraph(ge::ComputeGraphPtr graph, const string& input_node_type) {
  618. ge::NodePtr node_s = AddNode(graph, "S", parser::DATA,1,1);
  619. ge::NodePtr node_a = AddNode(graph, "A", "testa",1,2);
  620. ge::NodePtr node_b = AddNode(graph, "B", "testb",1,2);
  621. ge::NodePtr node_c = AddNode(graph, "C", "testc",1,1);
  622. ge::NodePtr node_d = AddNode(graph, "D", "testd",1,1);
  623. ge::NodePtr node_e = AddNode(graph, "E", "teste",1,1);
  624. ge::NodePtr node_f = AddNode(graph, "F", "testf",1,1);
  625. ge::NodePtr node_g = AddNode(graph, "G", "testg",2,1);
  626. ge::NodePtr node_h = AddNode(graph, "H", "testh",1,1);
  627. ge::NodePtr node_i = AddNode(graph, "I", "testi",1,1);
  628. ge::NodePtr node_j = AddNode(graph, "J", "testj",2,1);
  629. ge::NodePtr node_k = AddNode(graph, "K", parser::NETOUTPUT,1,1);
  630. ge::GraphUtils::AddEdge(node_s->GetOutDataAnchor(0), node_a->GetInDataAnchor(0));
  631. ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_b->GetInDataAnchor(0));
  632. ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(1), node_c->GetInDataAnchor(0));
  633. ge::GraphUtils::AddEdge(node_b->GetOutDataAnchor(0), node_d->GetInDataAnchor(0));
  634. ge::GraphUtils::AddEdge(node_b->GetOutDataAnchor(1), node_e->GetInDataAnchor(0));
  635. ge::GraphUtils::AddEdge(node_c->GetOutDataAnchor(0), node_g->GetInDataAnchor(0));
  636. ge::GraphUtils::AddEdge(node_d->GetOutDataAnchor(0), node_f->GetInDataAnchor(0));
  637. ge::GraphUtils::AddEdge(node_e->GetOutDataAnchor(0), node_g->GetInDataAnchor(1));
  638. ge::GraphUtils::AddEdge(node_f->GetOutDataAnchor(0), node_h->GetInDataAnchor(0));
  639. ge::GraphUtils::AddEdge(node_g->GetOutDataAnchor(0), node_j->GetInDataAnchor(0));
  640. ge::GraphUtils::AddEdge(node_h->GetOutDataAnchor(0), node_i->GetInDataAnchor(0));
  641. ge::GraphUtils::AddEdge(node_i->GetOutDataAnchor(0), node_j->GetInDataAnchor(1));
  642. ge::GraphUtils::AddEdge(node_j->GetOutDataAnchor(0), node_k->GetInDataAnchor(0));
  643. ge::GraphUtils::AddEdge(node_h->GetOutControlAnchor(), node_j->GetInControlAnchor());
  644. }
  645. void MakeGraph(const ComputeGraphPtr &root_graph, const string &name) {
  646. root_graph->SetName(name);
  647. ge::NodePtr data1 = AddNode(root_graph, name + "_input1", parser::DATA, 1, 1);
  648. ge::NodePtr data2 = AddNode(root_graph, name + "_input2", parser::DATA, 1, 1);
  649. ge::NodePtr add = AddNode(root_graph, name + "_add", parser::ADD, 2, 1);
  650. ge::NodePtr net_output = AddNode(root_graph, name + "_net_output", parser::NETOUTPUT, 1, 1);
  651. ge::GraphUtils::AddEdge(data1->GetOutDataAnchor(0), add->GetInDataAnchor(0));
  652. ge::GraphUtils::AddEdge(data2->GetOutDataAnchor(0), add->GetInDataAnchor(1));
  653. ge::GraphUtils::AddEdge(add->GetOutDataAnchor(0), net_output->GetInDataAnchor(0));
  654. }
  655. void ChangeDataType(tensorflow::NodeDef* node_tf, int32_t data_type)
  656. {
  657. domi::tensorflow::AttrValue input_attr_value;
  658. google::protobuf::Map<std::string, tensorflow::AttrValue>* attr = node_tf->mutable_attr();
  659. google::protobuf::Map<std::string, tensorflow::AttrValue>::const_iterator it = attr->find(ge::ATTR_NAME_INPUT_TENSOR_DESC);
  660. if (it != attr->end()) {
  661. input_attr_value = it->second;
  662. }
  663. (*attr)[ge::ATTR_NAME_INPUT_TENSOR_DESC] = input_attr_value;
  664. }
  665. NodeDef* AddGraphNode(GraphDef *graph, string name, string optype, string input)
  666. {
  667. NodeDef *node_def = graph->add_node();
  668. node_def->set_name(name);
  669. node_def->set_op(optype);
  670. node_def->add_input(input);
  671. return node_def;
  672. }
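  // build_graph assembles Data1 -> Relu1 -> FullConnection -> Relu2 -> Mul -> Mul1/Mul2; when with_leaf_node is true, Relu3 is additionally hung off the FC's second output.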
  673. ge::ComputeGraphPtr build_graph(bool with_leaf_node = false)
  674. {
  675. ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>("default");
  676. ge::OpDescPtr data_op = std::make_shared<ge::OpDesc>();
  677. data_op->SetType(parser::DATA);
  678. data_op->SetName("Data1");
  679. data_op->AddInputDesc(ge::GeTensorDesc());
  680. data_op->AddOutputDesc(ge::GeTensorDesc());
  681. ge::NodePtr data1 = graph->AddNode(data_op);
  682. ge::OpDescPtr relu_op1 = std::make_shared<ge::OpDesc>();
  683. relu_op1->SetType(parser::ACTIVATION);
  684. relu_op1->SetName("Relu1");
  685. relu_op1->AddInputDesc(ge::GeTensorDesc());
  686. relu_op1->AddOutputDesc(ge::GeTensorDesc());
  687. ge::NodePtr relu1 = graph->AddNode(relu_op1);
  688. ge::OpDescPtr relu_op2 = std::make_shared<ge::OpDesc>();
  689. relu_op2->SetType(parser::RELU);
  690. relu_op2->SetName("Relu2");
  691. relu_op2->AddInputDesc(ge::GeTensorDesc());
  692. relu_op2->AddOutputDesc(ge::GeTensorDesc());
  693. relu_op2->AddOutputDesc(ge::GeTensorDesc());
  694. ge::NodePtr relu2 = graph->AddNode(relu_op2);
  695. ge::OpDescPtr relu_op3 = std::make_shared<ge::OpDesc>();
  696. relu_op3->SetType(parser::ACTIVATION);
  697. relu_op3->SetName("Relu3");
  698. relu_op3->AddInputDesc(ge::GeTensorDesc());
  699. relu_op3->AddOutputDesc(ge::GeTensorDesc());
  700. ge::NodePtr relu3;
  701. if (with_leaf_node == true) {
  702. relu3 = graph->AddNode(relu_op3);
  703. }
  704. ge::OpDescPtr mul_op = std::make_shared<ge::OpDesc>();
  705. mul_op->SetType(parser::MUL);
  706. mul_op->SetName("Mul");
  707. mul_op->AddInputDesc(ge::GeTensorDesc());
  708. mul_op->AddInputDesc(ge::GeTensorDesc());
  709. mul_op->AddOutputDesc(ge::GeTensorDesc());
  710. mul_op->AddOutputDesc(ge::GeTensorDesc());
  711. mul_op->AddOutputDesc(ge::GeTensorDesc());
  712. mul_op->AddOutputDesc(ge::GeTensorDesc());
  713. ge::NodePtr mul = graph->AddNode(mul_op);
  714. ge::OpDescPtr mul_op1 = std::make_shared<ge::OpDesc>();
  715. mul_op1->SetType(parser::MUL);
  716. mul_op1->SetName("Mul1");
  717. mul_op1->AddInputDesc(ge::GeTensorDesc());
  718. mul_op1->AddInputDesc(ge::GeTensorDesc());
  719. mul_op1->AddOutputDesc(ge::GeTensorDesc());
  720. ge::NodePtr mul1 = graph->AddNode(mul_op1);
  721. ge::OpDescPtr mul_op2 = std::make_shared<ge::OpDesc>();
  722. mul_op2->SetType(parser::MUL);
  723. mul_op2->SetName("Mul2");
  724. mul_op2->AddInputDesc(ge::GeTensorDesc());
  725. mul_op2->AddInputDesc(ge::GeTensorDesc());
  726. mul_op2->AddOutputDesc(ge::GeTensorDesc());
  727. ge::NodePtr mul2 = graph->AddNode(mul_op2);
  728. ge::OpDescPtr fc_op = std::make_shared<ge::OpDesc>();
  729. fc_op->SetType(parser::FULL_CONNECTION);
  730. fc_op->SetName("FullConnection");
  731. fc_op->AddInputDesc(ge::GeTensorDesc());
  732. fc_op->AddOutputDesc(ge::GeTensorDesc());
  733. fc_op->AddOutputDesc(ge::GeTensorDesc());
  734. ge::NodePtr fc = graph->AddNode(fc_op);
  735. ge::GraphUtils::AddEdge(data1->GetOutDataAnchor(0), relu1->GetInDataAnchor(0));
  736. ge::GraphUtils::AddEdge(relu1->GetOutDataAnchor(0), fc->GetInDataAnchor(0));
  737. ge::GraphUtils::AddEdge(fc->GetOutDataAnchor(0), relu2->GetInDataAnchor(0));
  738. if (with_leaf_node == true) {
  739. ge::GraphUtils::AddEdge(fc->GetOutDataAnchor(1), relu3->GetInDataAnchor(0));
  740. }
  741. ge::GraphUtils::AddEdge(relu2->GetOutDataAnchor(0), mul->GetInDataAnchor(0));
  742. ge::GraphUtils::AddEdge(relu2->GetOutDataAnchor(1), mul->GetInDataAnchor(1));
  743. ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(0), mul1->GetInDataAnchor(0));
  744. ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(1), mul1->GetInDataAnchor(1));
  745. ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(2), mul2->GetInDataAnchor(0));
  746. ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(3), mul2->GetInDataAnchor(1));
  747. return graph;
  748. }
  749. }
  750. namespace {
  751. REG_OP(Data)
  752. .INPUT(x, TensorType::ALL())
  753. .OUTPUT(y, TensorType::ALL())
  754. .ATTR(index, Int, 0)
  755. .OP_END_FACTORY_REG(Data)
  756. REG_OP(Add)
  757. .INPUT(x1, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
  758. DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
  759. DT_COMPLEX64, DT_STRING}))
  760. .INPUT(x2, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
  761. DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
  762. DT_COMPLEX64, DT_STRING}))
  763. .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
  764. DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
  765. DT_COMPLEX64, DT_STRING}))
  766. .OP_END_FACTORY_REG(Add)
  767. }
  768. static Status FusionParserParams(const std::vector<const google::protobuf::Message *> inside_nodes, ge::Operator &op) {
  769. return domi::SUCCESS;
  770. }
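  // MemBufferFromFile reads the whole file at path into a newly allocated MemBuffer, returning nullptr on any I/O failure; the caller owns both the struct and its data.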
  771. static MemBuffer* MemBufferFromFile(const char *path)
  772. {
  773. char path_temp[PATH_MAX + 1] = {0x00};
  774. if(strlen(path) > PATH_MAX || nullptr == realpath(path, path_temp)) {
  775. return nullptr;
  776. }
  777. FILE *fp = fopen(path_temp, "r+");
  778. if (fp == nullptr) {
  779. return nullptr;
  780. }
  781. // get model file length
  782. if (0 != fseek(fp, 0, SEEK_END)) {
  783. fclose(fp);
  784. return nullptr;
  785. }
  786. long file_length = ftell(fp);
  787. if (fseek(fp, 0, SEEK_SET)) {
  788. fclose(fp);
  789. return nullptr;
  790. }
  791. if (file_length <= 0) {
  792. fclose(fp);
  793. return nullptr;
  794. }
  795. // alloc model buffer
  796. void *data = malloc((unsigned int)file_length);
  797. if (!data) {
  798. fclose(fp);
  799. return nullptr;
  800. }
  801. // read file into memory
  802. uint32_t read_size = (uint32_t)fread(data, 1, (unsigned int)file_length, fp);
  803. // check whether the read succeeded
  804. if ((long)read_size != file_length) {
  805. free(data);
  806. data = nullptr;
  807. fclose(fp);
  808. return nullptr;
  809. }
  810. // close model file
  811. fclose(fp);
  812. // create a MemBuffer
  813. MemBuffer* membuf = new MemBuffer();
  814. if (!membuf) {
  815. free(data);
  816. data = nullptr;
  817. return nullptr;
  818. }
  819. membuf->data = malloc((unsigned int)read_size);
  820. // set size && data
  821. membuf->size = (uint32_t)read_size;
  822. memcpy((char*)membuf->data, (char*)data, read_size);
  823. free(data);
  824. return membuf;
  825. }
  826. /// placeholder0 placeholder1
  827. /// | /\ /\ |
  828. /// | / \/ \ |
  829. /// | / /\ \ |
  830. /// | | / \ | |
  831. /// | add0 mul0 |
  832. /// | / /c | \ |
  833. /// mul1 --- / | add1
  834. /// \ | |
  835. /// \ ---- add2 |
  836. /// | |
  837. /// retval0 retval1
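/// (Two Softmax nodes are also added in the code below; they are not shown in the diagram.)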
  838. void CreateGraphDef(domi::tensorflow::GraphDef &graph_def) {
  839. // 1. add node
  840. auto placeholder0 = graph_def.add_node();
  841. auto placeholder1 = graph_def.add_node();
  842. auto add0 = graph_def.add_node();
  843. auto add1 = graph_def.add_node();
  844. auto mul0 = graph_def.add_node();
  845. auto mul1 = graph_def.add_node();
  846. auto add2 = graph_def.add_node();
  847. auto retval0 = graph_def.add_node();
  848. auto retval1 = graph_def.add_node();
  849. auto softmax0 = graph_def.add_node();
  850. auto softmax1 = graph_def.add_node();
  851. // 2. set info
  852. placeholder0->set_name("placeholder0");
  853. placeholder0->set_op("PlaceHolder");
  854. placeholder1->set_name("placeholder1");
  855. placeholder1->set_op("PlaceHolder");
  856. add0->set_name("add0");
  857. add0->set_op("Add");
  858. add1->set_name("add1");
  859. add1->set_op("Add");
  860. add2->set_name("add2");
  861. add2->set_op("Add");
  862. mul0->set_name("mul0");
  863. mul0->set_op("Mul");
  864. mul1->set_name("mul1");
  865. mul1->set_op("Mul");
  866. retval0->set_name("retval0");
  867. retval0->set_op("_RetVal");
  868. retval1->set_name("retval1");
  869. retval1->set_op("_RetVal");
  874. softmax0->set_name("Softmax0");
  875. softmax0->set_op("Softmax");
  876. softmax1->set_name("Softmax1");
  877. softmax1->set_op("Softmax");
  878. // 3. add edges
  879. add0->add_input("placeholder0");
  880. add0->add_input("placeholder1");
  881. mul0->add_input("placeholder0");
  882. mul0->add_input("placeholder1");
  883. mul1->add_input("placeholder0");
  884. mul1->add_input("add0");
  885. mul1->add_input("^mul0");
  886. add1->add_input("mul0");
  887. add1->add_input("placeholder1");
  888. add2->add_input("mul1");
  889. add2->add_input("mul0");
  890. retval0->add_input("add2:0");
  891. retval1->add_input("add1:0");
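// NOTE: "add3" is not defined in this GraphDef; presumably this is why parsing this graph is
// expected to fail in tensorflow_parserAllGraph_failed.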
  892. softmax0->add_input("add3:0");
  893. softmax0->add_input("add2:0");
  894. }
  895. TEST_F(STestTensorflowParser, tensorflow_parser_success) {
  896. RegisterCustomOp();
  897. std::string case_dir = __FILE__;
  898. ParserOperator unused("Add");
  899. case_dir = case_dir.substr(0, case_dir.find_last_of("/"));
  900. std::string model_file = case_dir + "/origin_models/tf_add.pb";
  901. std::map<ge::AscendString, ge::AscendString> parser_params;
  902. ge::Graph graph;
  903. auto ret = ge::aclgrphParseTensorFlow(model_file.c_str(), parser_params, graph);
  904. ASSERT_EQ(ret, SUCCESS);
  905. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  906. auto output_nodes_info = compute_graph->GetGraphOutNodesInfo();
  907. ASSERT_EQ(output_nodes_info.size(), 1);
  908. EXPECT_EQ((output_nodes_info.at(0).first->GetName()), "add_test_1");
  909. EXPECT_EQ((output_nodes_info.at(0).second), 0);
  910. auto &net_out_name = ge::GetParserContext().net_out_nodes;
  911. ASSERT_EQ(net_out_name.size(), 1);
  912. EXPECT_EQ(net_out_name.at(0), "add_test_1:0");
  913. }
  914. TEST_F(STestTensorflowParser, tensorflow_model_Failed) {
  915. ge::Graph graph;
  916. std::string caseDir = __FILE__;
  917. std::size_t idx = caseDir.find_last_of("/");
  918. caseDir = caseDir.substr(0, idx);
  919. std::string modelFile = caseDir + "/origin_models/model.pb";
  920. auto status = ge::aclgrphParseTensorFlow(modelFile.c_str(), graph);
  921. EXPECT_EQ(status, ge::SUCCESS);
  922. modelFile = caseDir + "/origin_models/test_depth_wise_conv2d.pb";
  923. status = ge::aclgrphParseTensorFlow(modelFile.c_str(), graph);
  924. EXPECT_EQ(status, ge::GRAPH_FAILED);
  925. }
  926. TEST_F(STestTensorflowParser, tensorflow_model_not_exist) {
  927. ge::Graph graph;
  928. std::string caseDir = __FILE__;
  929. std::size_t idx = caseDir.find_last_of("/");
  930. caseDir = caseDir.substr(0, idx);
931. // model file does not exist
  932. std::string modelFile = caseDir + "/origin_models/conv2d_explicit1_pad.pb";
  933. auto status = ge::aclgrphParseTensorFlow(modelFile.c_str(), graph);
  934. EXPECT_EQ(status, ge::GRAPH_FAILED);
  935. }
  936. TEST_F(STestTensorflowParser, parser_tensorflow_model) {
  937. std::string caseDir = __FILE__;
  938. std::size_t idx = caseDir.find_last_of("/");
  939. caseDir = caseDir.substr(0, idx);
  940. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  941. const char *model_file = modelFile.c_str();
  942. std::string op_name = "ge_ascend_irgraph";
  943. ge::Graph graph(op_name);
  944. std::map<ge::AscendString, ge::AscendString> parser_options = {
  945. {ge::AscendString(ge::ir_option::INPUT_FORMAT), ge::AscendString("NHWC")},
  946. };
  947. auto ret_graph = ge::aclgrphParseTensorFlow(model_file, parser_options, graph);
  948. EXPECT_EQ(ret_graph, ge::FAILED);
949. // parse tensorflow model where an out node index equals the node's output count, so parsing fails
  950. string graph_name;
  951. AclGrphParseUtil acl_graph_parse_util;
  952. std::map<AscendString, AscendString> out_nodes_with_node_and_index = {
  953. {AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:1")}};
  954. ParerSTestsUtils::ClearParserInnerCtx();
  955. auto ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_node_and_index, graph_name);
  956. ret_graph = ge::aclgrphParseTensorFlow(model_file, graph);
  957. EXPECT_EQ(ret_graph, domi::FAILED);
958. // parse tensorflow model successfully
  959. modelFile = caseDir + "/origin_models/model.pb";
  960. model_file = modelFile.c_str();
  961. out_nodes_with_node_and_index = {{AscendString(ge::ir_option::OUT_NODES), AscendString("x:0;y:0")}};
  962. ParerSTestsUtils::ClearParserInnerCtx();
  963. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_node_and_index, graph_name);
  964. ret_graph = ge::aclgrphParseTensorFlow(model_file, graph);
  965. EXPECT_EQ(ret_graph, domi::SUCCESS);
  966. }
  967. TEST_F(STestTensorflowParser, tensorflow_parser_to_json)
  968. {
  969. TensorFlowModelParser modelParser;
  970. std::string caseDir = __FILE__;
  971. std::size_t idx = caseDir.find_last_of("/");
  972. caseDir = caseDir.substr(0, idx);
  973. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  974. std::string jsonFile = caseDir + "/origin_models/test.json";
  975. const char *model_file = modelFile.c_str();
  976. const char *json_file = jsonFile.c_str();
  977. Status ret = modelParser.ToJson(model_file, json_file);
  978. EXPECT_EQ(ret, SUCCESS);
  979. }
  980. TEST_F(STestTensorflowParser, tensorflow_parserfrommemory_failed)
  981. {
  982. TensorFlowModelParser modelParser;
  983. std::string caseDir = __FILE__;
  984. std::size_t idx = caseDir.find_last_of("/");
  985. caseDir = caseDir.substr(0, idx);
  986. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  987. const char *data = modelFile.c_str();
  988. uint32_t size = 1;
  989. ge::Graph graph;
  990. std::map<ge::AscendString, ge::AscendString> parser_params;
  991. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  992. ASSERT_EQ(ret, SUCCESS);
  993. modelFile = caseDir + "/origin_models/tf_add.pb";
  994. parser_params = {{AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:0")}};
  995. ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  996. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  997. ret = modelParser.ParseFromMemory(data, size, compute_graph);
  998. EXPECT_NE(ret, SUCCESS);
  999. }
  1000. TEST_F(STestTensorflowParser, modelparser_parsefrommemory_success)
  1001. {
  1002. std::string caseDir = __FILE__;
  1003. std::size_t idx = caseDir.find_last_of("/");
  1004. caseDir = caseDir.substr(0, idx);
  1005. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1006. const char* tmp_tf_pb_model = modelFile.c_str();
  1007. ge::Graph graph;
  1008. std::map<ge::AscendString, ge::AscendString> parser_params;
  1009. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  1010. ASSERT_EQ(ret, SUCCESS);
  1011. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  1012. TensorFlowModelParser modelParser;
  1013. MemBuffer* memBuffer = MemBufferFromFile(tmp_tf_pb_model);
1014. EXPECT_FALSE(PreChecker::Instance().HasError());
ASSERT_NE(memBuffer, nullptr);
1015. ret = modelParser.ParseFromMemory((char*)memBuffer->data, memBuffer->size, compute_graph);
  1016. free(memBuffer->data);
  1017. delete memBuffer;
  1018. }
  1019. TEST_F(STestTensorflowParser, weightsparser_parsefrommemory_success)
  1020. {
  1021. std::string caseDir = __FILE__;
  1022. std::size_t idx = caseDir.find_last_of("/");
  1023. caseDir = caseDir.substr(0, idx);
  1024. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1025. const char* tmp_tf_pb_model = modelFile.c_str();
  1026. ge::Graph graph;
  1027. std::map<ge::AscendString, ge::AscendString> parser_params;
  1028. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  1029. ASSERT_EQ(ret, SUCCESS);
  1030. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  1031. auto weights_parser = domi::WeightsParserFactory::Instance()->CreateWeightsParser(domi::TENSORFLOW);
1032. MemBuffer* memBuffer = MemBufferFromFile(tmp_tf_pb_model);
ASSERT_NE(memBuffer, nullptr);
  1033. ret = weights_parser->ParseFromMemory((char*)memBuffer->data, memBuffer->size, compute_graph);
  1034. free(memBuffer->data);
  1035. delete memBuffer;
  1036. EXPECT_EQ(SUCCESS, ret);
  1037. }
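// Subgraph callback for ParseProtoWithSubgraph: it ignores the requested subgraph name and always
// returns the tf_add.pb path.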
  1038. std::string getGraphCallbackV2(string subgraph_name)
  1039. {
  1040. std::string caseDir = __FILE__;
  1041. std::size_t idx = caseDir.find_last_of("/");
  1042. caseDir = caseDir.substr(0, idx);
  1043. subgraph_name = caseDir + "/origin_models/tf_add.pb";
  1044. return subgraph_name;
  1045. }
  1046. TEST_F(STestTensorflowParser, parser_ParseProtoWithSubgraphV2)
  1047. {
  1048. std::string caseDir = __FILE__;
  1049. std::size_t idx = caseDir.find_last_of("/");
  1050. caseDir = caseDir.substr(0, idx);
  1051. const std::string root_proto = caseDir + "/origin_models/tf_add.pb";
  1052. ge::Graph graph;
  1053. std::map<ge::AscendString, ge::AscendString> parser_params;
  1054. Status ret = ge::aclgrphParseTensorFlow(root_proto.c_str(), parser_params, graph);
  1055. ASSERT_EQ(ret, SUCCESS);
  1056. ge::ComputeGraphPtr root_graph = ge::GraphUtils::GetComputeGraph(graph);
  1057. domi::GetGraphCallbackV2 callback(&getGraphCallbackV2);
  1058. TensorFlowModelParser parser;
  1059. ret = parser.ParseProtoWithSubgraph(root_proto, callback, root_graph);
  1060. }
  1061. TEST_F(STestTensorflowParser, parser_ConvertToGeDataType)
  1062. {
  1063. // convert to ge type success
  1064. const uint32_t type1 = domi::tensorflow::DataType::DT_FLOAT;
  1065. TensorFlowModelParser parser;
  1066. ge::DataType dataType = parser.ConvertToGeDataType(type1);
  1067. ASSERT_EQ(dataType, ge::DataType::DT_FLOAT);
  1068. const uint32_t type2 = 80; // invalid type
  1069. dataType = parser.ConvertToGeDataType(type2);
  1070. ASSERT_EQ(dataType, ge::DataType::DT_UNDEFINED);
  1071. }
  1072. TEST_F(STestTensorflowParser, tensorflow_parser_with_external_normal_graph) {
  1073. // 1. Create root graph
  1074. ComputeGraphPtr root_graph = ge::parser::MakeShared<ge::ComputeGraph>("root_graph");
  1075. MakeGraph(root_graph, "root_graph");
  1076. // 2. Create ONNX sub graph
  1077. // 2.1 Sub graph of onnx graph
  1078. ge::ComputeGraphPtr sub_sub_graph = ge::parser::MakeShared<ge::ComputeGraph>("sub_sub");
  1079. // 2.2 ONNX graph
  1080. ComputeGraphPtr sub_graph = ge::parser::MakeShared<ge::ComputeGraph>("sub_sub");
  1081. MakeGraph(sub_graph, "sub_graph");
  1082. auto add = sub_graph->FindNode("sub_graph_add");
  1083. ASSERT_NE(add, nullptr);
  1084. add->GetOpDesc()->AddSubgraphName("sub_sub_graph");
  1085. add->GetOpDesc()->SetSubgraphInstanceName(0, sub_sub_graph->GetName());
  1086. sub_graph->AddSubGraph(sub_sub_graph);
  1087. auto input1 = sub_graph->FindNode("sub_graph_input1");
  1088. ASSERT_NE(input1, nullptr);
  1089. AttrUtils::SetInt(input1->GetOpDesc(), ATTR_NAME_INDEX, 0);
  1090. auto input2 = sub_graph->FindNode("sub_graph_input2");
  1091. ASSERT_NE(input2, nullptr);
  1092. AttrUtils::SetInt(input2->GetOpDesc(), ATTR_NAME_INDEX, 1);
  1093. // 3. Serialize ONNX graph to string
  1094. // 3.1 normal
  1095. ge::Model model("model", "");
  1096. model.SetGraph(GraphUtils::CreateGraphFromComputeGraph(sub_graph));
  1097. Buffer buffer;
  1098. graphStatus save_ret = model.Save(buffer, false);
  1099. ASSERT_EQ(save_ret, GRAPH_SUCCESS);
  1100. std::string external_graph(reinterpret_cast<const char *>(buffer.GetData()),
  1101. buffer.GetSize());
1102. // a model that will fail (ATTR_NAME_INDEX removed from input1)
  1103. input1->GetOpDesc()->DelAttr(ATTR_NAME_INDEX);
  1104. ge::Model model_will_fail("model_will_fail", "");
  1105. model_will_fail.SetGraph(GraphUtils::CreateGraphFromComputeGraph(sub_graph));
  1106. Buffer buffer_fail;
  1107. save_ret = model_will_fail.Save(buffer_fail, false);
  1108. ASSERT_EQ(save_ret, GRAPH_SUCCESS);
  1109. std::string external_graph_fail(
  1110. reinterpret_cast<const char *>(buffer_fail.GetData()),
  1111. buffer_fail.GetSize());
  1112. // 4. Set string to function node
  1113. auto root_add = root_graph->FindNode("root_graph_add");
  1114. ASSERT_NE(root_add, nullptr);
  1115. AttrUtils::SetStr(root_add->GetOpDesc(), "_external_model", external_graph);
  1116. auto root_input1 = root_graph->FindNode("root_graph_input1");
  1117. ASSERT_NE(root_input1, nullptr);
  1118. AttrUtils::SetInt(root_input1->GetOpDesc(), ATTR_NAME_INDEX, 0);
  1119. auto root_input2 = root_graph->FindNode("root_graph_input2");
  1120. ASSERT_NE(root_input2, nullptr);
  1121. AttrUtils::SetInt(root_input2->GetOpDesc(), ATTR_NAME_INDEX, 1);
  1122. // 5. Run test (normal)
  1123. auto ret = TensorFlowModelParser::AddExternalGraph(root_graph);
  1124. EXPECT_EQ(ret, SUCCESS);
  1125. EXPECT_EQ(root_graph->GetAllSubgraphs().size(), 2);
  1126. EXPECT_EQ(sub_graph->GetAllSubgraphs().size(), 1);
  1127. EXPECT_NE(root_graph->GetSubgraph(sub_graph->GetName()), nullptr);
  1128. EXPECT_EQ(root_graph->GetSubgraph(sub_graph->GetName())->GetAllSubgraphs().size(), 0);
  1129. }
  1130. TEST_F(STestTensorflowParser, tensorflow_ParserProto_failed)
  1131. {
  1132. std::string caseDir = __FILE__;
  1133. std::size_t idx = caseDir.find_last_of("/");
  1134. caseDir = caseDir.substr(0, idx);
  1135. const std::string root_proto = caseDir + "/origin_models/avgpool3dgrad.pb.txt";
  1136. domi::tensorflow::GraphDef graphDef;
  1137. ge::Graph graph;
  1138. std::map<ge::AscendString, ge::AscendString> parser_params;
  1139. Status ret = ge::aclgrphParseTensorFlow(root_proto.c_str(), parser_params, graph);
  1140. ASSERT_EQ(ret, SUCCESS);
  1141. ge::ComputeGraphPtr root_graph = ge::GraphUtils::GetComputeGraph(graph);
  1142. TensorFlowModelParser tensorflow_parser;
  1143. ret = tensorflow_parser.ParseProto(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  1144. EXPECT_EQ(PARAM_INVALID, ret);
1145. // proto parsing fails
  1146. bool protoRet = parser::ReadProtoFromText(root_proto.c_str(), &graphDef);
  1147. ASSERT_EQ(protoRet, false);
  1148. ret = tensorflow_parser.ParseProto(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  1149. ASSERT_EQ(ret, PARAM_INVALID);
  1150. std::string serialized_proto = "";
  1151. ret = tensorflow_parser.ParseProto(serialized_proto, root_graph);
  1152. ASSERT_EQ(ret, FAILED);
  1153. }
  1154. TEST_F(STestTensorflowParser, tensorflow_parserAllGraph_failed)
  1155. {
  1156. std::string caseDir = __FILE__;
  1157. std::size_t idx = caseDir.find_last_of("/");
  1158. caseDir = caseDir.substr(0, idx);
  1159. const std::string root_proto = caseDir + "/origin_models/conv2d.pb";
  1160. domi::tensorflow::GraphDef graphDef;
  1161. CreateGraphDef(graphDef);
  1162. auto no_op = graphDef.add_node();
  1163. no_op->set_name("no_op");
  1164. no_op->set_op("NoOp");
  1165. no_op->add_input("placeholder0");
  1166. no_op->add_input("placeholder1");
  1167. ge::Graph graph;
  1168. std::map<ge::AscendString, ge::AscendString> parser_params;
  1169. Status ret = ge::aclgrphParseTensorFlow(root_proto.c_str(), parser_params, graph);
  1170. ASSERT_EQ(ret, SUCCESS);
  1171. ge::ComputeGraphPtr root_graph = ge::GraphUtils::GetComputeGraph(graph);
  1172. TensorFlowModelParser tensorflow_parser;
  1173. ret = tensorflow_parser.ParseAllGraph(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  1174. ASSERT_NE(ret, SUCCESS);
  1175. }
  1176. TEST_F(STestTensorflowParser, test_parse_acl_output_nodes)
  1177. {
  1178. AclGrphParseUtil acl_graph_parse_util;
  1179. string graph_name;
  1180. // case 1: Normal with 'node and index'
  1181. ParerSTestsUtils::ClearParserInnerCtx();
  1182. GetParserContext().type = domi::ONNX;
  1183. std::map<AscendString, AscendString> out_nodes_with_node_and_index = {
  1184. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out1:0;Out2:1")}};
  1185. ParerSTestsUtils::ClearParserInnerCtx();
  1186. auto ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_node_and_index, graph_name);
  1187. ASSERT_EQ(ret, SUCCESS);
  1188. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 2);
  1189. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 2);
  1190. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 0);
  1191. // case 2: Normal with 'tensor name'
  1192. ParerSTestsUtils::ClearParserInnerCtx();
  1193. GetParserContext().type = domi::ONNX;
  1194. std::map<AscendString, AscendString> out_nodes_with_tensor_name = {
  1195. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out_tensor_1;Out_tensor_2")}};
  1196. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_with_tensor_name, graph_name);
  1197. ASSERT_EQ(ret, SUCCESS);
  1198. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 0);
  1199. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 0);
  1200. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 2);
  1201. // case 3: Failed with 'node and index' before 'tensor name'
  1202. ParerSTestsUtils::ClearParserInnerCtx();
  1203. GetParserContext().type = domi::ONNX;
  1204. std::map<AscendString, AscendString> out_nodes_mode_mixex_pre = {
  1205. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out1:0;Out2:1;Out_tensor_1;Out_tensor_2")}};
  1206. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_mode_mixex_pre, graph_name);
  1207. ASSERT_EQ(ret, PARAM_INVALID);
  1208. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 2);
  1209. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 2);
  1210. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 0);
  1211. // case 4: Failed with 'node and index' inserted in 'tensor name'
  1212. ParerSTestsUtils::ClearParserInnerCtx();
  1213. GetParserContext().type = domi::ONNX;
  1214. std::map<AscendString, AscendString> out_nodes_mode_mixex_mid = {
  1215. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out_tensor_1;Out1:0;Out2:1;Out_tensor_2")}};
  1216. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_mode_mixex_mid, graph_name);
  1217. ASSERT_EQ(ret, PARAM_INVALID);
  1218. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 0);
  1219. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 0);
  1220. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 1);
  1221. // case 5: Failed with 'node and index' after 'tensor name'
  1222. ParerSTestsUtils::ClearParserInnerCtx();
  1223. GetParserContext().type = domi::ONNX;
  1224. std::map<AscendString, AscendString> out_nodes_mode_mixex_post = {
  1225. {AscendString(ge::ir_option::OUT_NODES), AscendString("Out_tensor_1;Out_tensor_2;Out1:0;Out2:1")}};
  1226. ret = acl_graph_parse_util.ParseParamsBeforeGraph(out_nodes_mode_mixex_post, graph_name);
  1227. ASSERT_EQ(ret, PARAM_INVALID);
  1228. EXPECT_EQ(ge::GetParserContext().user_out_nodes.size(), 0);
  1229. EXPECT_EQ(ge::GetParserContext().out_nodes_map.size(), 0);
  1230. EXPECT_EQ(ge::GetParserContext().user_out_tensors.size(), 2);
  1231. }
  1232. TEST_F(STestTensorflowParser, parse_AutoMappingByOp) {
  1233. static const string KEY_STRING = "key_string";
  1234. static const string KEY_INT = "key_int";
  1235. static const string KEY_FLOAT = "key_float";
  1236. static const string KEY_BOOL = "key_bool";
  1237. static const string KEY_TYPE = "key_type";
  1238. static const string VALUE_STRING = "string";
  1239. static const int64_t VALUE_INT = 1;
  1240. static const float VALUE_FLOAT = 1.0;
  1241. static const bool VALUE_BOOL = true;
  1242. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1243. static const string VALUE_NAME = "test_name";
  1244. ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  1245. NodeDef node_def;
  1246. domi::tensorflow::AttrValue value;
  1247. ge::Operator op = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc);
  1248. node_def.set_name(VALUE_NAME);
  1249. value.set_s(VALUE_STRING);
  1250. TensorFlowUtil::AddNodeAttr(KEY_STRING, value, &node_def);
  1251. value.set_i(VALUE_INT);
  1252. TensorFlowUtil::AddNodeAttr(KEY_INT, value, &node_def);
  1253. value.set_f(VALUE_FLOAT);
  1254. TensorFlowUtil::AddNodeAttr(KEY_FLOAT, value, &node_def);
  1255. value.set_b(VALUE_BOOL);
  1256. TensorFlowUtil::AddNodeAttr(KEY_BOOL, value, &node_def);
  1257. value.set_type(VALUE_TYPE);
  1258. TensorFlowUtil::AddNodeAttr(KEY_TYPE, value, &node_def);
  1259. domi::Status status = domi::AutoMappingFn(reinterpret_cast<google::protobuf::Message *>(&node_def), op);
  1260. EXPECT_EQ(domi::SUCCESS, status);
  1261. EXPECT_EQ(VALUE_NAME, op_desc->GetName());
  1262. string value_string = "";
  1263. ge::AttrUtils::GetStr(op_desc, KEY_STRING, value_string);
  1264. EXPECT_EQ(VALUE_STRING, value_string);
  1265. int64_t value_int = 0;
  1266. ge::AttrUtils::GetInt(op_desc, KEY_INT, value_int);
  1267. EXPECT_EQ(VALUE_INT, value_int);
  1268. float value_float = 0.0;
  1269. ge::AttrUtils::GetFloat(op_desc, KEY_FLOAT, value_float);
  1270. EXPECT_EQ(VALUE_FLOAT, value_float);
  1271. bool value_bool = false;
  1272. ge::AttrUtils::GetBool(op_desc, KEY_BOOL, value_bool);
  1273. EXPECT_EQ(VALUE_BOOL, value_bool);
  1274. ge::DataType data_type = ge::DT_UNDEFINED;
  1275. ge::AttrUtils::GetDataType(op_desc, KEY_TYPE, data_type);
  1276. EXPECT_EQ(ge::DT_FLOAT, data_type);
  1277. // test AutoMappingByOpFn
  1278. ge::OpDescPtr op_desc_dest = std::make_shared<ge::OpDesc>();
  1279. ge::Operator op_dest = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc_dest);
  1280. status = domi::AutoMappingByOpFn(op, op_dest);
  1281. EXPECT_EQ(domi::SUCCESS, status);
  1282. EXPECT_EQ(VALUE_NAME, op_dest.GetName());
  1283. value_string = "";
  1284. ge::AttrUtils::GetStr(op_desc_dest, KEY_STRING, value_string);
  1285. EXPECT_EQ(VALUE_STRING, value_string);
  1286. value_int = 0;
  1287. ge::AttrUtils::GetInt(op_desc_dest, KEY_INT, value_int);
  1288. EXPECT_EQ(VALUE_INT, value_int);
  1289. value_float = 0.0;
  1290. ge::AttrUtils::GetFloat(op_desc_dest, KEY_FLOAT, value_float);
  1291. EXPECT_EQ(VALUE_FLOAT, value_float);
  1292. value_bool = false;
  1293. ge::AttrUtils::GetBool(op_desc_dest, KEY_BOOL, value_bool);
  1294. EXPECT_EQ(VALUE_BOOL, value_bool);
  1295. data_type = ge::DT_UNDEFINED;
  1296. ge::AttrUtils::GetDataType(op_desc_dest, KEY_TYPE, data_type);
  1297. EXPECT_EQ(ge::DT_FLOAT, data_type);
  1298. }
  1299. TEST_F(STestTensorflowParser, parse_ParseNodeDef)
  1300. {
  1301. NodeDef * node_def = new NodeDef();
  1302. node_def->set_name("test_name");
  1303. node_def->set_op("PlaceholderWithDefault");
  1304. bool isDatasetInit = true;
  1305. TensorFlowModelParser model_parser;
  1306. Status ret = model_parser.AdaptOpType(node_def, isDatasetInit);
  1307. EXPECT_EQ(domi::SUCCESS, ret);
  1308. node_def->set_op("Add");
  1309. ret = model_parser.AdaptOpType(node_def, isDatasetInit);
  1310. EXPECT_EQ(domi::SUCCESS, ret);
  1311. delete node_def;
  1312. }
  1313. TEST_F(STestTensorflowParser, parse_AddFmkNode)
  1314. {
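// Builds a scope graph containing a fusion result plus generated origin NodeDefs, then exercises
// AddFmkNode and AddEdges on it.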
  1315. TensorFlowModelParser modelParser;
  1316. std::string caseDir = __FILE__;
  1317. std::size_t idx = caseDir.find_last_of("/");
  1318. caseDir = caseDir.substr(0, idx);
  1319. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1320. ge::Graph graph;
  1321. string graph_name;
  1322. AclGrphParseUtil acl_graph_parse_util;
  1323. std::map<ge::AscendString, ge::AscendString> parser_options = {{AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:0")}};
  1324. ParerSTestsUtils::ClearParserInnerCtx();
  1325. Status ret = acl_graph_parse_util.ParseParamsBeforeGraph(parser_options, graph_name);
  1326. ret = aclgrphParseTensorFlow(modelFile.c_str(), parser_options, graph);
  1327. ASSERT_EQ(ret, SUCCESS);
  1328. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  1329. tensorflow::GraphDef *graphDef = new (std::nothrow) tensorflow::GraphDef();
  1330. ScopePassManager pass_manager;
  1331. std::shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graphDef);
  1332. std::string fusion_op_name = "fusion_op_name";
  1333. FusionScopesResult *fusion_rlt = new (std::nothrow) FusionScopesResult();
  1334. EXPECT_NE(fusion_rlt, nullptr);
  1335. fusion_rlt->Init();
  1336. GenFusionScopesResult(scope_graph, fusion_rlt, fusion_op_name);
  1337. GenOriginContext(&modelParser, fusion_op_name);
  1338. // origin inner node def
  1339. NodeDef* node_def = MallocNodeDef("scope_node_1", "Add");
  1340. EXPECT_NE(node_def, nullptr);
  1341. modelParser.fusion_op_nodedef_map_[fusion_op_name].push_back(node_def);
  1342. bool train_flag_backup = ge::GetParserContext().train_flag;
  1343. ge::GetParserContext().train_flag = true;
  1344. REGISTER_CUSTOM_OP("Identity")
  1345. .FrameworkType(domi::TENSORFLOW)
  1346. .OriginOpType("Identity")
  1347. .ParseParamsFn(ParseParams)
  1348. .ImplyType(ImplyType::TVM);
  1349. REGISTER_CUSTOM_OP("Constant")
  1350. .FrameworkType(domi::TENSORFLOW)
  1351. .OriginOpType("Const")
  1352. .ParseParamsFn(ParseParams)
  1353. .ImplyType(ImplyType::TVM);
  1354. register_tbe_op();
  1355. std::vector<std::string> node_name_list;
  1356. GenOriginNodeDef(&modelParser, node_name_list);
  1357. std::set<std::string> malloc_node_name_list(node_name_list.begin(), node_name_list.end());
  1358. node_name_list.push_back(fusion_op_name);
  1359. ret = modelParser.AddFmkNode(compute_graph, scope_graph, node_name_list, false);
  1360. EXPECT_EQ(ret, PARAM_INVALID);
  1361. EXPECT_EQ(modelParser.scope_inner_node_map_.size(), 0);
  1362. EXPECT_EQ(modelParser.nodedef_map_.size(), 5);
  1363. ret = modelParser.AddEdges(compute_graph);
  1364. EXPECT_EQ(ret, SUCCESS);
1365. // release resources
  1366. delete graphDef;
  1367. delete node_def;
  1368. modelParser.DeleteFuisonNodeDef();
  1369. FreeNodeDefMap(&modelParser, malloc_node_name_list);
  1370. ge::GetParserContext().train_flag = train_flag_backup;
  1371. }
  1372. TEST_F(STestTensorflowParser, parse_AddScopeInnerNode)
  1373. {
  1374. TensorFlowModelParser modelParser;
  1375. std::string caseDir = __FILE__;
  1376. std::size_t idx = caseDir.find_last_of("/");
  1377. caseDir = caseDir.substr(0, idx);
  1378. std::string modelFile = caseDir + "/origin_models/tf_add.pb";
  1379. std::string op_name = "ge_ascend_irgraph";
  1380. ge::Graph graph(op_name);
  1381. ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
  1382. std::map<ge::AscendString, ge::AscendString> parser_params = {
  1383. {AscendString(ge::ir_option::OUT_NODES), AscendString("Placeholder:0;Placeholder_1:0")}};
  1384. Status ret = ge::aclgrphParseTensorFlow(modelFile.c_str(), parser_params, graph);
  1385. EXPECT_EQ(ret, SUCCESS);
  1386. std::mutex graph_mutex;
  1387. tensorflow::NodeDef *node_def = initNodeDef();
  1388. node_def->set_name("FastrcnnPredictions");
  1389. node_def->set_op("FastrcnnPredictions");
  1390. // can't find in scope_inner_node_map
  1391. ret = modelParser.AddScopeInnerNode(&modelParser, compute_graph, &graph_mutex, node_def);
  1392. EXPECT_EQ(ret, PARAM_INVALID);
  1393. delete node_def;
  1394. }
  1395. TEST_F(STestTensorflowParser, dyncmic_rnn_scope_pass_plugin_test) {
  1396. ge::Graph graph;
  1397. std::cout << __FILE__ << std::endl;
  1398. std::string caseDir = __FILE__;
  1399. std::size_t idx = caseDir.find_last_of("/");
  1400. caseDir = caseDir.substr(0, idx);
  1401. std::string modelFile = caseDir + "/origin_models/tensor_array.pb";
  1402. std::map<ge::AscendString, ge::AscendString> params;
  1403. string key ="enable_scope_fusion_passes";
  1404. string value ="ScopeDynamicRNNPass";
  1405. params.insert(std::make_pair(ge::AscendString(key.c_str()), ge::AscendString(value.c_str())));
  1406. auto status = aclgrphParseTensorFlow(modelFile.c_str(), params, graph);
  1407. EXPECT_EQ(status, SUCCESS);
  1408. }
  1409. TEST_F(STestTensorflowParser, avgpool3dgrad_plugin_test_format_NDHWC) {
  1410. ge::Graph graph;
  1411. std::cout << __FILE__ << std::endl;
  1412. std::string caseDir = __FILE__;
  1413. std::size_t idx = caseDir.find_last_of("/");
  1414. caseDir = caseDir.substr(0, idx);
  1415. std::string modelFile = caseDir + "/origin_models/avgpool3dgrad_case_1.pb";
  1416. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1417. EXPECT_EQ(status, SUCCESS);
  1418. }
  1419. TEST_F(STestTensorflowParser, tensorflow_merge_test) {
  1420. ge::Graph graph;
  1421. std::cout << __FILE__ << std::endl;
  1422. std::string caseDir = __FILE__;
  1423. std::size_t idx = caseDir.find_last_of("/");
  1424. caseDir = caseDir.substr(0, idx);
  1425. std::string modelFile = caseDir + "/origin_models/merge.pb";
  1426. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1427. EXPECT_EQ(status, FAILED);
  1428. }
  1429. TEST_F(STestTensorflowParser, tensorflow_no_op_test) {
  1430. ge::Graph graph;
  1431. std::cout << __FILE__ << std::endl;
  1432. std::string caseDir = __FILE__;
  1433. std::size_t idx = caseDir.find_last_of("/");
  1434. caseDir = caseDir.substr(0, idx);
  1435. std::string modelFile = caseDir + "/origin_models/test_no_op.pb";
  1436. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1437. EXPECT_EQ(status, SUCCESS);
  1438. }
  1439. TEST_F(STestTensorflowParser, tensorflow_identity_test) {
  1440. ge::Graph graph;
  1441. std::cout << __FILE__ << std::endl;
  1442. std::string caseDir = __FILE__;
  1443. std::size_t idx = caseDir.find_last_of("/");
  1444. caseDir = caseDir.substr(0, idx);
  1445. std::string modelFile = caseDir + "/origin_models/test_identity.pb";
  1446. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1447. EXPECT_EQ(status, SUCCESS);
  1448. }
  1449. TEST_F(STestTensorflowParser, tensorflow_constant_test) {
  1450. ge::Graph graph;
  1451. std::cout << __FILE__ << std::endl;
  1452. std::string caseDir = __FILE__;
  1453. std::size_t idx = caseDir.find_last_of("/");
  1454. caseDir = caseDir.substr(0, idx);
  1455. std::string modelFile = caseDir + "/origin_models/test_constant.pb";
  1456. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1457. EXPECT_EQ(status, SUCCESS);
  1458. TensorFlowConstantParser constantParser;
  1459. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("constant", ge::parser::CONSTANT);
  1460. NodeDef* node_def = initNodeDef();
  1461. node_def->set_name("Constant");
  1462. auto params = constantParser.ParseParams(node_def, op_dest);
  1463. EXPECT_EQ(params, SUCCESS);
  1464. auto value = constantParser.ParseValue(node_def, op_dest);
  1465. EXPECT_EQ(value, SUCCESS);
  1466. ConstantOperator op;
  1467. auto type = constantParser.ParseDType(node_def, &op);
  1468. EXPECT_EQ(type, SUCCESS);
  1469. }
1470. TEST_F(STestTensorflowParser, tensorflow_reshape_test) {
  1471. ge::Graph graph;
  1472. std::cout << __FILE__ << std::endl;
  1473. std::string caseDir = __FILE__;
  1474. std::size_t idx = caseDir.find_last_of("/");
  1475. caseDir = caseDir.substr(0, idx);
  1476. std::string modelFile = caseDir + "/origin_models/test_reshape.pb";
  1477. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1478. EXPECT_EQ(status, SUCCESS);
  1479. TensorFlowReshapeParser parser;
  1480. NodeDef * nodeDef = new NodeDef();
  1481. ge::OpDescPtr opdef_ = make_shared<::ge::OpDesc>("","");
  1482. google::protobuf::Map<std::string, tensorflow::AttrValue > *attr_map = nodeDef->mutable_attr();
  1483. domi::tensorflow::AttrValue tshape_attr_value;
  1484. tshape_attr_value.set_type(domi::tensorflow::DT_INT32);
  1485. (*attr_map)[TENSORFLOW_ATTR_TSHAPE] = tshape_attr_value;
  1486. domi::tensorflow::AttrValue t_attr_value;
  1487. t_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1488. (*attr_map)[TENSORFLOW_ATTR_T] = t_attr_value;
  1489. Status ret = parser.ParseParams(nodeDef, opdef_);
  1490. EXPECT_EQ(domi::SUCCESS, ret);
  1491. delete nodeDef;
  1492. }
  1493. TEST_F(STestTensorflowParser, tensorflow_squeeze_test) {
  1494. ge::Graph graph;
  1495. std::cout << __FILE__ << std::endl;
  1496. std::string caseDir = __FILE__;
  1497. std::size_t idx = caseDir.find_last_of("/");
  1498. caseDir = caseDir.substr(0, idx);
  1499. std::string modelFile = caseDir + "/origin_models/test_sequeeze.pb";
  1500. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1501. EXPECT_EQ(status, SUCCESS);
  1502. TensorFlowSqueezeParser parser;
  1503. NodeDef *nodeDef = initNodeDef();
  1504. ge::OpDescPtr opDef = make_shared<::ge::OpDesc>("Squeeze","Squeeze");
  1505. Status ret = parser.ParseParams(nodeDef, opDef);
  1506. EXPECT_EQ(ret, SUCCESS);
  1507. NodeDef *nodeDef_dim = initNodeDef_dims();
  1508. ret = parser.ParseParams(nodeDef_dim, opDef);
  1509. EXPECT_EQ(SUCCESS, ret);
  1510. NodeDef *nodeDef_axis_dims = initNodeDef_axis_dims();
  1511. ret = parser.ParseParams(nodeDef_axis_dims, opDef);
  1512. EXPECT_EQ(GRAPH_PARAM_INVALID, ret);
  1513. static const string KEY_SHAPE_LIST = "key_shape_list";
  1514. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1515. static const string KEY_DEFAULT = "key_default";
  1516. NodeDef *nodeDef2 = new NodeDef();
  1517. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = nodeDef2->mutable_attr();
  1518. domi::tensorflow::AttrValue dtype_attr_value ;
  1519. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1520. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1521. // set the axis attribute
  1522. tensorflow::AttrValue axis_attr_value;
  1523. tensorflow::AttrValue_ListValue *list = axis_attr_value.mutable_list();
  1524. list->add_i(1);
  1525. list->add_i(2);
  1526. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1527. domi::tensorflow::AttrValue value;
  1528. domi::tensorflow::AttrValue df_attr_value;
  1529. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1530. domi::tensorflow::AttrValue pad_attr_value;
  1531. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1532. domi::tensorflow::AttrValue shape;
  1533. shape.mutable_list()->add_i((int64)32);
  1534. shape.mutable_list()->add_i((int64)32);
  1535. shape.mutable_list()->add_i((int64)14);
  1536. static const string KEY_TYPE_LIST = "key_type_list";
  1537. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "input_tensor_desc";
  1538. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1539. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1540. value.clear_value();
  1541. value.mutable_list()->add_type(VALUE_TYPE);
  1542. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, nodeDef2);
  1543. value.clear_value();
  1544. domi::tensorflow::NameAttrList name_attr_list;
  1545. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1546. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1547. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1548. *(value.mutable_list()->add_func()) = name_attr_list;
  1549. nodeDef2->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  1550. nodeDef2->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1551. ret = parser.ParseParams(nodeDef2, opDef);
  1552. EXPECT_EQ(domi::SUCCESS, ret);
  1553. GeTensorDesc ge_desc;
  1554. ge_desc.SetFormat(ge::FORMAT_C1HWNCoC0);
  1555. ge_desc.SetDataType(ge::DT_FLOAT);
  1556. ge_desc.SetShape(GeShape({1,1,1,1,1,1}));
  1557. ret = parser.ParseDesc(value, ge_desc);
  1558. EXPECT_EQ(ret, SUCCESS);
  1559. delete nodeDef2;
  1560. delete nodeDef_axis_dims;
  1561. delete nodeDef_dim;
  1562. delete nodeDef;
  1563. }
  1564. TEST_F(STestTensorflowParser, tensorflow_fill_test) {
  1565. ge::Graph graph;
  1566. std::cout << __FILE__ << std::endl;
  1567. std::string caseDir = __FILE__;
  1568. std::size_t idx = caseDir.find_last_of("/");
  1569. caseDir = caseDir.substr(0, idx);
  1570. std::string modelFile = caseDir + "/origin_models/test_fill.pb";
  1571. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1572. EXPECT_EQ(status, SUCCESS);
  1573. }
  1574. TEST_F(STestTensorflowParser, tensorflow_shape_n_test) {
  1575. ge::Graph graph;
  1576. std::cout << __FILE__ << std::endl;
  1577. std::string caseDir = __FILE__;
  1578. std::size_t idx = caseDir.find_last_of("/");
  1579. caseDir = caseDir.substr(0, idx);
  1580. std::string modelFile = caseDir + "/origin_models/test_shape_n.pb";
  1581. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1582. EXPECT_EQ(status, SUCCESS);
  1583. }
  1584. TEST_F(STestTensorflowParser, tensorflow_switch_test) {
  1585. ge::Graph graph;
  1586. std::cout << __FILE__ << std::endl;
  1587. std::string caseDir = __FILE__;
  1588. std::size_t idx = caseDir.find_last_of("/");
  1589. caseDir = caseDir.substr(0, idx);
  1590. std::string modelFile = caseDir + "/origin_models/test_switch.pb";
  1591. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1592. EXPECT_EQ(status, SUCCESS);
  1593. TensorFlowRefSwitchParser refSwitchParser;
  1594. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("constant", ge::parser::CONSTANT);
  1595. NodeDef* node_def = initNodeDef();
  1596. node_def->set_name("RefSwitch");
  1597. auto params = refSwitchParser.ParseParams(node_def, op_dest);
  1598. EXPECT_EQ(params, SUCCESS);
  1599. RefSwitchOperator op;
  1600. auto parseRet = refSwitchParser.ParseT(node_def, &op);
  1601. EXPECT_EQ(parseRet, SUCCESS);
  1602. }
  1603. TEST_F(STestTensorflowParser, tensorflow_enter_test) {
  1604. ge::Graph graph;
  1605. std::cout << __FILE__ << std::endl;
  1606. std::string caseDir = __FILE__;
  1607. std::size_t idx = caseDir.find_last_of("/");
  1608. caseDir = caseDir.substr(0, idx);
  1609. std::string modelFile = caseDir + "/origin_models/test_enter.pb";
  1610. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1611. EXPECT_EQ(status, SUCCESS);
  1612. TensorFlowEnterParser enterParser;
  1613. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("Enter", ge::parser::ENTER);
  1614. NodeDef* node_def = initNodeDef();
  1615. node_def->set_name("Enter");
  1616. Status ret = enterParser.ParseParams(node_def, op_dest);
  1617. EXPECT_EQ(ret, FAILED);
  1618. static const string KEY_SHAPE_LIST = "key_shape_list";
  1619. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1620. static const string KEY_DEFAULT = "key_default";
  1621. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  1622. domi::tensorflow::AttrValue dtype_attr_value;
  1623. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1624. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1625. // set the axis attribute
  1626. domi::tensorflow::AttrValue axis_attr_value;
  1627. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  1628. list->add_i(1);
  1629. list->add_i(2);
  1630. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1631. domi::tensorflow::AttrValue value;
  1632. domi::tensorflow::AttrValue df_attr_value;
  1633. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1634. domi::tensorflow::AttrValue pad_attr_value;
  1635. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1636. domi::tensorflow::AttrValue shape;
  1637. shape.mutable_list()->add_i((int64)32);
  1638. shape.mutable_list()->add_i((int64)32);
  1639. shape.mutable_list()->add_i((int64)14);
  1640. static const string KEY_TYPE_LIST = "key_type_list";
  1641. const std::string ENTER_ATTR_FRAME_NAME = "frame_name";
  1642. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1643. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1644. value.clear_value();
  1645. value.mutable_list()->add_type(VALUE_TYPE);
  1646. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  1647. value.clear_value();
  1648. domi::tensorflow::NameAttrList name_attr_list;
  1649. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1650. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1651. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1652. *(value.mutable_list()->add_func()) = name_attr_list;
  1653. node_def->mutable_attr()->insert({ge::ENTER_ATTR_FRAME_NAME, value});
  1654. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1655. ret = enterParser.ParseParams(node_def, op_dest);
  1656. EXPECT_EQ(ret, FAILED);
  1657. }
  1658. TEST_F(STestTensorflowParser, tensorflow_VariableV2_test) {
  1659. ge::Graph graph;
  1660. std::string caseDir = __FILE__;
  1661. std::size_t idx = caseDir.find_last_of("/");
  1662. caseDir = caseDir.substr(0, idx);
  1663. std::string modelFile = caseDir + "/origin_models/test_VariableV2.pb";
  1664. auto status = aclgrphParseTensorFlow(modelFile.c_str(), graph);
  1665. EXPECT_EQ(status, SUCCESS);
  1666. }
  1667. TEST_F(STestTensorflowParser, tensorflow_fusion_op_parser_test)
  1668. {
  1669. TensorFlowFusionOpParser fusionOpParser;
  1670. ge::OpDescPtr op_dest = make_shared<ge::OpDesc>("FusionOp", ge::parser::CONSTANT);
  1671. int index = 0;
  1672. NodeDef* node_def = fusioninitNodeDef(index);
  1673. node_def->set_name("FusionOp");
  1674. auto ret = fusionOpParser.ParseParams(node_def, op_dest);
  1675. EXPECT_EQ(ret, SUCCESS);
  1676. int32_t param = 1;
  1677. ret = fusionOpParser.ParseParamFromConst(node_def, param);
  1678. EXPECT_EQ(ret, SUCCESS);
  1679. ret = fusionOpParser.ParseParamFromConst(node_def, param, index);
  1680. EXPECT_EQ(ret, SUCCESS);
  1681. float params = 0.0;
  1682. ret = fusionOpParser.ParseParamFromConst(node_def, params);
  1683. EXPECT_EQ(ret, SUCCESS);
  1684. index = 2;
  1685. node_def = fusioninitNodeDef(index);
  1686. ret = fusionOpParser.ParseParamFromConst(node_def, params, index);
  1687. EXPECT_EQ(ret, domi::PARAM_INVALID);
  1688. ret = fusionOpParser.ParseHalfFromConst(node_def, params, 0);
  1689. EXPECT_EQ(ret, SUCCESS);
  1690. ret = fusionOpParser.ParseHalfFromConst(node_def, params, 3);
  1691. EXPECT_EQ(ret, domi::PARAM_INVALID);
  1692. node_def = fusioninitNodeDef(0);
  1693. ret = fusionOpParser.ParseHalfFromConst(node_def, params, 3);
  1694. EXPECT_EQ(ret, domi::PARAM_INVALID);
  1695. static const float VALUE_FLOAT = 1.0;
  1696. ge::GeTensorPtr weight = nullptr;
  1697. ret = fusionOpParser.ParseWeightFromConst(node_def, weight);
  1698. EXPECT_EQ(ret, domi::SUCCESS);
  1699. EXPECT_NE(weight, nullptr);
  1700. ge::DataType ge_data_type = weight->GetTensorDesc().GetDataType();
  1701. EXPECT_EQ(ge_data_type, ge::DataType::DT_FLOAT);
  1702. const uint8_t* data_buff = weight->GetData().GetData();
  1703. size_t data_size = weight->GetData().size();
  1704. EXPECT_NE(data_buff, nullptr);
  1705. EXPECT_EQ(data_size, sizeof(float));
  1706. float value_float = *((float*)data_buff);
  1707. EXPECT_EQ(value_float, VALUE_FLOAT);
  1708. delete node_def;
  1709. }
  1710. TEST_F(STestTensorflowParser, tensorflow_auto_mapping_parser_adapter_test)
  1711. {
  1712. ge::OpDescPtr op_dest = nullptr;
  1713. Message *op_src = nullptr;
  1714. TensorFlowAutoMappingParserAdapter autoMappingParser;
  1715. NodeDef* node_def = initNodeDef();
  1716. Status ret = autoMappingParser.ParseParams(op_src, op_dest);
  1717. EXPECT_EQ(ret, PARAM_INVALID);
  1718. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1719. EXPECT_EQ(ret, PARAM_INVALID);
  1720. op_dest = make_shared<ge::OpDesc>("AutoMapping", ge::parser::CONSTANT);
  1721. op_dest->SetType(ge::parser::EMPTY);
  1722. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1723. EXPECT_EQ(ret, SUCCESS);
  1724. op_dest->SetType(ge::parser::IDENTITYN);
  1725. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1726. EXPECT_EQ(ret, SUCCESS);
  1727. op_dest->SetType(ge::parser::SIZE);
  1728. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1729. EXPECT_EQ(ret, SUCCESS);
  1730. op_dest->SetType(ge::parser::SHAPE);
  1731. op_dest->AddOutputDesc(GeTensorDesc());
  1732. ret = autoMappingParser.ParseParams(node_def, op_dest);
  1733. EXPECT_EQ(ret, SUCCESS);
  1734. }
  1735. TEST_F(STestTensorflowParser, tensorflow_fusion_custom_parser_adapter_test)
  1736. {
  1737. REGISTER_CUSTOM_OP("FusionCustom")
  1738. .FrameworkType(domi::TENSORFLOW)
  1739. .OriginOpType("FusionCustom")
  1740. .FusionParseParamsFn(FusionParserParams)
  1741. .ImplyType(ImplyType::TVM);
  1742. register_tbe_op();
  1743. auto graph = std::make_shared<ge::ComputeGraph>("FusionCustom");
  1744. auto op_desc = std::make_shared<ge::OpDesc>("FusionCustom", "FusionCustom");
  1745. auto node = graph->AddNode(op_desc);
  1746. NodeDef *node_def = new NodeDef();
  1747. std::vector<const NodeDef *> v_input_const1;
  1748. v_input_const1.push_back(node_def);
  1749. TensorFlowFusionCustomParserAdapter parser;
  1750. domi::Status status = parser.ParseParams(v_input_const1, node);
  1751. EXPECT_EQ(SUCCESS, status);
  1752. ge::Operator op_src("pool", "pooling");
  1753. std::vector<ge::Operator> v_input_const2;
  1754. v_input_const2.push_back(op_src);
  1755. Status ret = parser.ParseParams(v_input_const2, node);
  1756. EXPECT_EQ(FAILED, ret);
  1757. delete node_def;
  1758. }
  1759. TEST_F(STestTensorflowParser, tensorflow_custom_parser_adapter_test)
  1760. {
  1761. ge::Operator op_src("pool", "pooling");
  1762. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1763. TensorFlowCustomParserAdapter parser;
  1764. Status ret = parser.ParseParams(op_src, op_dest);
  1765. EXPECT_EQ(ret, FAILED);
  1766. REGISTER_CUSTOM_OP("Variable")
  1767. .FrameworkType(domi::TENSORFLOW)
  1768. .OriginOpType("VariableV2")
  1769. .ParseParamsFn(ParseParams)
  1770. .ParseParamsByOperatorFn(ParseParamByOpFunc)
  1771. .ImplyType(ImplyType::CUSTOM);
  1772. register_tbe_op();
  1773. Operator opSrc(ge::parser::VARIABLE, "VariableV2");
  1774. ret = parser.ParseParams(opSrc, op_dest);
  1775. EXPECT_EQ(ret, SUCCESS);
  1776. }
  1777. TEST_F(STestTensorflowParser, tensorflow_graph_functiondef_FindAttrValue_test)
  1778. {
  1779. GraphToFunctionDef functionDef;
  1780. NodeDef *node_def = nullptr;
  1781. std::string attr_name = "Const";
  1782. tensorflow::AttrValue attr_value;
  1783. bool ret = functionDef.FindAttrValue(node_def, attr_name, attr_value);
  1784. EXPECT_EQ(ret, false);
  1785. node_def = initNodeDef();
  1786. attr_name = ge::ATTR_NAME_INPUT_TENSOR_DESC;
  1787. node_def->set_name("Const");
  1788. ret = functionDef.FindAttrValue(node_def, attr_name, attr_value);
  1789. EXPECT_EQ(ret, false);
  1790. }
  1791. TEST_F(STestTensorflowParser, tensorflow_graph_functiondef_BuildFunctionDef_test)
  1792. {
  1793. ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  1794. string inputNodeType = "DATA";
  1795. MakeDagGraph(subGraph, inputNodeType);
  1796. FunctionDefLibrary library;
  1797. tensorflow::NodeDef call_node_def;
  1798. call_node_def.set_op("fusionop");
  1799. call_node_def.set_name("fusionop");
  1800. vector<ge::InDataAnchorPtr> in_anchor;
  1801. vector<ge::OutDataAnchorPtr> out_anchor;
  1802. for (ge::NodePtr node : subGraph->GetAllNodes()) {
  1803. for (auto in : node->GetAllInDataAnchors()) {
  1804. if (in->GetPeerOutAnchor() != nullptr && in->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc()->GetType() == parser::DATA) {
  1805. in_anchor.push_back(in);
  1806. }
  1807. }
  1808. for (auto out : node->GetAllOutDataAnchors()) {
  1809. for (auto i : out->GetPeerInDataAnchors()) {
  1810. if (i->GetOwnerNode()->GetOpDesc()->GetType() == parser::NETOUTPUT) {
  1811. out_anchor.push_back(out);
  1812. }
  1813. }
  1814. }
  1815. }
  1816. Status ret = GraphToFunctionDef::BuildFunctionDef(subGraph,
  1817. "fusionop",
  1818. &library,
  1819. &call_node_def,
  1820. in_anchor,
  1821. out_anchor);
  1822. EXPECT_EQ(domi::INTERNAL_ERROR, ret);
  1823. }
  1824. TEST_F(STestTensorflowParser, tensorflow_CheckOpShapeDim_test)
  1825. {
  1826. NodeDef *node_def = initNodeDef();
  1827. std::set<int> dims;
  1828. dims.insert(1);
  1829. dims.insert(2);
  1830. bool valid = true;
  1831. TensorFlowModelParser parser;
  1832. Status ret = parser.CheckOpShapeDim(node_def, dims, valid);
  1833. EXPECT_EQ(ret, SUCCESS);
  1834. static const string KEY_SHAPE_LIST = "key_shape_list";
  1835. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1836. static const string KEY_DEFAULT = "key_default";
  1837. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  1838. domi::tensorflow::AttrValue dtype_attr_value;
  1839. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1840. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1841. // set the axis attribute
  1842. domi::tensorflow::AttrValue axis_attr_value;
  1843. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  1844. list->add_i(1);
  1845. list->add_i(2);
  1846. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1847. domi::tensorflow::AttrValue value;
  1848. domi::tensorflow::AttrValue df_attr_value;
  1849. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1850. domi::tensorflow::AttrValue pad_attr_value;
  1851. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1852. domi::tensorflow::AttrValue shape;
  1853. shape.mutable_list()->add_i((int64)32);
  1854. shape.mutable_list()->add_i((int64)32);
  1855. shape.mutable_list()->add_i((int64)14);
  1856. static const string KEY_TYPE_LIST = "key_type_list";
  1857. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "input_tensor_desc";
  1858. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1859. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1860. value.clear_value();
  1861. value.mutable_list()->add_type(VALUE_TYPE);
  1862. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  1863. value.clear_value();
  1864. domi::tensorflow::NameAttrList name_attr_list;
  1865. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1866. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1867. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1868. *(value.mutable_list()->add_func()) = name_attr_list;
  1869. node_def->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  1870. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1871. ret = parser.CheckOpShapeDim(node_def, dims, valid);
  1872. EXPECT_EQ(ret, SUCCESS);
  1873. }
  1874. TEST_F(STestTensorflowParser, tensorflow_Scope_pass_test)
  1875. {
  1876. ScopePassManager passmanager;
  1877. auto scope_graph = ge::parser::MakeShared<ge::ScopeGraph>();
  1878. if (scope_graph == nullptr) {
  1879. GELOGE(FAILED, "Scope graph make shared failed.");
  1880. return;
  1881. }
  1882. if (scope_graph->Init() != SUCCESS) {
  1883. GELOGE(FAILED, "Scope graph init failed.");
  1884. return;
  1885. }
  1886. ge::TensorFlowModelParser tf_model_parser;
  1887. std::vector<string> scope_passes_list = {"ScopeBasicLSTMCellPass", "ScopeLayerNormPass"};
  1888. Status ret = tf_model_parser.RunScopeFusionPass(scope_passes_list, passmanager, scope_graph);
  1889. EXPECT_NE(ge::SUCCESS, ret);
  1890. }
  1891. TEST_F(STestTensorflowParser, tensorflow_variable_v2_parser_test)
  1892. {
  1893. TensorFlowCustomParserAdapter parser;
  1894. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1895. NodeDef *node_def = initNodeDef();
  1896. TensorFlowModelParser modelParser;
  1897. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1898. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("Variable");
  1899. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1900. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1901. EXPECT_EQ(ret, PARAM_INVALID);
  1902. node_def->set_name("TemporaryVariable");
  1903. node_def->set_op("TemporaryVariable");
  1904. op_parser = factory->CreateOpParser("TemporaryVariable");
  1905. tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1906. ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1907. EXPECT_EQ(ret, PARAM_INVALID);
  1908. NodeDef *nodeDef_temporaryVariable = initOpNodeDef_TemporaryVariable();
  1909. op_parser = factory->CreateOpParser("TemporaryVariable");
  1910. tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1911. ret = tensorflow_op_parser->ParseParams(nodeDef_temporaryVariable, op_dest);
  1912. EXPECT_EQ(ret, SUCCESS);
  1913. NodeDef *nodeDef_VariableV2 = initOpNodeDef_VariableV2();
  1914. op_parser = factory->CreateOpParser("Variable");
  1915. tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1916. ret = tensorflow_op_parser->ParseParams(nodeDef_VariableV2, op_dest);
  1917. EXPECT_EQ(ret, SUCCESS);
  1918. }
  1919. TEST_F(STestTensorflowParser, tensorflow_var_is_initialized_op_test)
  1920. {
  1921. TensorFlowCustomParserAdapter parser;
  1922. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1923. NodeDef *node_def = initNodeDef();
  1924. TensorFlowModelParser modelParser;
  1925. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1926. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("VarIsInitializedOp");
  1927. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1928. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1929. EXPECT_EQ(ret, SUCCESS);
  1930. }
  1931. TEST_F(STestTensorflowParser, tensorflow_arg_parser_test)
  1932. {
  1933. TensorFlowCustomParserAdapter parser;
  1934. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1935. NodeDef *node_def = initNodeDef();
  1936. TensorFlowModelParser modelParser;
  1937. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1938. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("_Arg");
  1939. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1940. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1941. EXPECT_EQ(ret, SUCCESS);
  1942. static const string KEY_SHAPE_LIST = "key_shape_list";
  1943. static const string KEY_TENSOR_LIST = "key_tensor_list";
  1944. static const string KEY_DEFAULT = "key_default";
  1945. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  1946. domi::tensorflow::AttrValue dtype_attr_value;
  1947. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  1948. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
1949. // set the axis attribute
  1950. domi::tensorflow::AttrValue axis_attr_value;
  1951. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  1952. list->add_i(1);
  1953. list->add_i(2);
  1954. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  1955. domi::tensorflow::AttrValue value;
  1956. domi::tensorflow::AttrValue df_attr_value;
  1957. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  1958. domi::tensorflow::AttrValue pad_attr_value;
  1959. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  1960. domi::tensorflow::AttrValue shape;
  1961. shape.mutable_list()->add_i((int64)32);
  1962. shape.mutable_list()->add_i((int64)32);
  1963. shape.mutable_list()->add_i((int64)14);
  1964. static const string KEY_TYPE_LIST = "key_type_list";
  1965. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "input_tensor_desc";
  1966. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  1967. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  1968. value.clear_value();
  1969. value.mutable_list()->add_type(VALUE_TYPE);
  1970. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  1971. value.clear_value();
  1972. domi::tensorflow::NameAttrList name_attr_list;
  1973. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  1974. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  1975. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  1976. *(value.mutable_list()->add_func()) = name_attr_list;
  1977. node_def->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  1978. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  1979. ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1980. EXPECT_EQ(ret, SUCCESS);
  1981. }
  1982. TEST_F(STestTensorflowParser, tensorflow_frameworkop_parser_test1)
  1983. {
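// A bare NodeDef is rejected by the FrameworkOp parser, and changing only the data type
// still yields PARAM_INVALID.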
  1984. TensorFlowCustomParserAdapter parser;
  1985. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  1986. NodeDef *node_def = initNodeDef();
  1987. TensorFlowModelParser modelParser;
  1988. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  1989. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("FrameworkOp");
  1990. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  1991. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1992. EXPECT_EQ(ret, PARAM_INVALID);
  1993. ChangeDataType(node_def, tensorflow::DT_UINT16);
  1994. ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  1995. EXPECT_EQ(ret, PARAM_INVALID);
  1996. }
  1997. TEST_F(STestTensorflowParser, tensorflow_frameworkop_parser_test2)
  1998. {
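// A _Retval NodeDef carrying serialized tensor-descriptor attributes is parsed as a
// FrameworkOp and expected to succeed.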
  1999. TensorFlowCustomParserAdapter parser;
  2000. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  2001. NodeDef *node_def = initNodeDef();
  2002. node_def->set_name("FrameworkOp");
  2003. node_def->set_op("_Retval");
  2004. TensorFlowModelParser modelParser;
  2005. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  2006. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("FrameworkOp");
  2007. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  2008. static const string KEY_SHAPE_LIST = "key_shape_list";
  2009. static const string KEY_TENSOR_LIST = "key_tensor_list";
  2010. static const string KEY_DEFAULT = "key_default";
  2011. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  2012. domi::tensorflow::AttrValue dtype_attr_value;
  2013. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  2014. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
2015. // Set the squeeze axis attribute
  2016. domi::tensorflow::AttrValue axis_attr_value;
  2017. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  2018. list->add_i(1);
  2019. list->add_i(2);
  2020. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  2021. domi::tensorflow::AttrValue value;
  2022. domi::tensorflow::AttrValue df_attr_value;
  2023. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  2024. domi::tensorflow::AttrValue pad_attr_value;
  2025. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  2026. domi::tensorflow::AttrValue shape;
  2027. shape.mutable_list()->add_i((int64)32);
  2028. shape.mutable_list()->add_i((int64)32);
  2029. shape.mutable_list()->add_i((int64)14);
  2030. static const string KEY_TYPE_LIST = "key_type_list";
  2031. const std::string ATTR_NAME_INPUT_TENSOR_DESC = "ATTR_NAME_FRAMEWORK_OP_DEF";
  2032. const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc";
  2033. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  2034. value.clear_value();
  2035. value.mutable_list()->add_type(VALUE_TYPE);
  2036. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  2037. value.clear_value();
  2038. domi::tensorflow::NameAttrList name_attr_list;
  2039. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  2040. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  2041. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  2042. *(value.mutable_list()->add_func()) = name_attr_list;
  2043. node_def->mutable_attr()->insert({ge::ATTR_NAME_INPUT_TENSOR_DESC, value});
  2044. node_def->mutable_attr()->insert({ge::ATTR_NAME_OUTPUT_TENSOR_DESC, value});
  2045. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  2046. EXPECT_EQ(ret, SUCCESS);
  2047. }
  2048. TEST_F(STestTensorflowParser, tensorflow_reshape_parser_test)
  2049. {
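// Parse a default NodeDef with the Reshape parser, then check
// TensorFlowReshapeParser::ParseDesc against a hand-built serialized tensor descriptor.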
  2050. TensorFlowCustomParserAdapter parser;
  2051. ge::OpDescPtr op_dest = std::make_shared<ge::OpDesc>();
  2052. NodeDef *node_def = initNodeDef();
  2053. TensorFlowModelParser modelParser;
  2054. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  2055. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("Reshape");
  2056. shared_ptr<TensorFlowOpParser> tensorflow_op_parser = std::dynamic_pointer_cast<TensorFlowOpParser>(op_parser);
  2057. Status ret = tensorflow_op_parser->ParseParams(node_def, op_dest);
  2058. EXPECT_EQ(ret, SUCCESS);
  2059. NodeDef * nodeDef = new NodeDef();
  2060. nodeDef->set_op("Reshape");
  2061. google::protobuf::Map< ::std::string, ::tensorflow::AttrValue >* node_attr_map = nodeDef->mutable_attr();
  2062. domi::tensorflow::AttrValue attr_value;
  2063. attr_value.mutable_list()->add_i((int64)32);
  2064. attr_value.mutable_list()->add_i((int64)32);
  2065. attr_value.mutable_list()->add_i((int64)14);
  2066. domi::tensorflow::AttrValue df_attr_value2;
  2067. df_attr_value2.set_s(TENSORFLOWF_TENSOR_NHWC);
  2068. (*node_attr_map)[TENSORFLOW_ATTR_DATA_FORMAT] = df_attr_value2;
  2069. domi::tensorflow::AttrValue df_attr_value;
  2070. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
2071. // Set the padding attribute
  2072. domi::tensorflow::AttrValue pad_attr_value2;
  2073. pad_attr_value2.set_s(TENSORFLOWF_OP_PADDING_SAME);
  2074. (*node_attr_map)[TENSORFLOW_ATTR_PADDING] = pad_attr_value2;
  2075. domi::tensorflow::AttrValue pad_attr_value;
  2076. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  2077. domi::tensorflow::NameAttrList name_attr_list;
  2078. name_attr_list.mutable_attr()->insert({"serialize_shape", attr_value});
  2079. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  2080. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  2081. *(attr_value.mutable_list()->add_func()) = name_attr_list;
  2082. GeTensorDesc ge_desc;
  2083. ge_desc.SetFormat(ge::FORMAT_C1HWNCoC0);
  2084. ge_desc.SetDataType(ge::DT_FLOAT);
  2085. ge_desc.SetShape(GeShape({1,1,1,1,1,1}));
  2086. TensorFlowReshapeParser reshapeParser;
  2087. ret = reshapeParser.ParseDesc(attr_value, ge_desc);
  2088. EXPECT_EQ(ret, SUCCESS);
  2089. }
  2090. TEST_F(STestTensorflowParser, tensorflow_DefunToPartitionedCall_parser_test)
  2091. {
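// DefunToPartitionedCall is expected to fail for a node without origin context and to
// succeed once GenOriginContext registers pre_node_a and the node is renamed to match.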
  2092. TensorFlowModelParser parser;
  2093. NodeDef *node_def = initNodeDef();
  2094. node_def->set_name("ShapeN");
  2095. ge::OpDescPtr op = make_shared<ge::OpDesc>("ShapeN", ge::parser::PARTITIONEDCALL);
  2096. Status ret = parser.DefunToPartitionedCall(node_def, op);
  2097. EXPECT_EQ(ret, FAILED);
  2098. static const string KEY_SHAPE_LIST = "key_shape_list";
  2099. static const string KEY_TENSOR_LIST = "key_tensor_list";
  2100. static const string KEY_DEFAULT = "key_default";
  2101. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = node_def->mutable_attr();
  2102. domi::tensorflow::AttrValue dtype_attr_value;
  2103. dtype_attr_value.set_type(domi::tensorflow::DT_FLOAT);
  2104. (*node_attr_map)[TENSORFLOW_ATTR_T] = dtype_attr_value;
2105. // Set the squeeze axis attribute
  2106. domi::tensorflow::AttrValue axis_attr_value;
  2107. ::tensorflow::AttrValue_ListValue* list = axis_attr_value.mutable_list();
  2108. list->add_i(1);
  2109. list->add_i(2);
  2110. (*node_attr_map)[ge::SQUEEZE_ATTR_AXIS] = axis_attr_value;
  2111. domi::tensorflow::AttrValue value;
  2112. domi::tensorflow::AttrValue df_attr_value;
  2113. df_attr_value.set_i((int64_t)ccTensorFormat_t::CC_TENSOR_NHWC);
  2114. domi::tensorflow::AttrValue pad_attr_value;
  2115. pad_attr_value.set_i((int64_t)tensorflow::DT_FLOAT);
  2116. domi::tensorflow::AttrValue shape;
  2117. shape.mutable_list()->add_i((int64)32);
  2118. shape.mutable_list()->add_i((int64)32);
  2119. shape.mutable_list()->add_i((int64)14);
  2120. static const string KEY_TYPE_LIST = "key_type_list";
  2121. static const domi::tensorflow::DataType VALUE_TYPE = domi::tensorflow::DataType::DT_FLOAT;
  2122. value.clear_value();
  2123. value.mutable_list()->add_type(VALUE_TYPE);
  2124. TensorFlowUtil::AddNodeAttr(KEY_TYPE_LIST, value, node_def);
  2125. value.clear_value();
  2126. domi::tensorflow::NameAttrList name_attr_list;
  2127. name_attr_list.mutable_attr()->insert({"serialize_datatype", pad_attr_value});
  2128. name_attr_list.mutable_attr()->insert({"serialize_format", df_attr_value});
  2129. name_attr_list.mutable_attr()->insert({"serialize_shape", shape});
  2130. *(value.mutable_list()->add_func()) = name_attr_list;
  2131. node_def->mutable_attr()->insert({"_disable_call_shape_inference", value});
  2132. node_def->mutable_attr()->insert({"_disable_call_shape_inference", value});
  2133. std::string fusion_op_name = "pre_node_a";
  2134. GenOriginContext(&parser, fusion_op_name);
  2135. node_def->set_name("pre_node_a");
  2136. ret = parser.DefunToPartitionedCall(node_def, op);
  2137. EXPECT_EQ(ret, SUCCESS);
  2138. }
  2139. TEST_F(STestTensorflowParser, tensorflow_TransNodeToOpDesc_parser_test)
  2140. {
  2141. TensorFlowModelParser parser;
  2142. NodeDef *node_def = initNodeDef();
  2143. node_def->set_name("ge::parser::DATA");
  2144. std::string op_type = "ge::parser::DATA";
  2145. ge::OpDescPtr op = make_shared<ge::OpDesc>("constant", ge::parser::CONSTANT);
  2146. Status ret = parser.TransNodeToOpDesc(node_def, op, op_type);
  2147. EXPECT_EQ(ret, FAILED);
  2148. }
  2149. domi::Status fusion_parse_param_by_op(const std::vector<ge::Operator> &op_src, ge::Operator &op) {
  2150. return domi::SUCCESS;
  2151. }
  2152. TEST_F(STestTensorflowParser, Fusion_node_parse_params_success) {
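// Register the FusionCustom NodeDef in fusion_op_nodedef_map_ and verify that
// FusionNodeParseParams succeeds with the fusion op parser.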
  2153. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2154. ModelParserFactory* factory = ModelParserFactory::Instance();
  2155. shared_ptr<ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2156. ASSERT_TRUE(NULL != model_parser);
  2157. TensorFlowModelParser tensorflow_parser;
  2158. domi::tensorflow::NodeDef node_def;
  2159. node_def.set_name("data");
  2160. node_def.set_op("FusionCustom");
  2161. FusionParseParamByOpFunc function = fusion_parse_param_by_op;
  2162. shared_ptr<ge::OpParserFactory> op_parser = ge::OpParserFactory::Instance(domi::TENSORFLOW);
  2163. shared_ptr<OpParser> fusion_op_parser = op_parser->CreateFusionOpParser("FusionCustom");
  2164. ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2165. ge::OpDescPtr op1 = std::make_shared<ge::OpDesc>("data", "FusionCustom");
  2166. ge::NodePtr node1 = std::make_shared<ge::Node>(op1, graph);
  2167. vector<const NodeDef *> node_defs;
  2168. node_defs.push_back(&node_def);
  2169. tensorflow_parser.fusion_op_nodedef_map_["data"] = node_defs;
  2170. Status ret = tensorflow_parser.FusionNodeParseParams(fusion_op_parser, &node_def, node1);
  2171. EXPECT_EQ(domi::SUCCESS, ret);
  2172. }
  2173. TEST_F(STestTensorflowParser, Tensorflow_recordFusionResult_parser_test)
  2174. {
  2175. auto scope_graph = ge::parser::MakeShared<ge::ScopeGraph>();
  2176. if (scope_graph == nullptr) {
  2177. GELOGE(FAILED, "Scope graph make shared failed.");
  2178. return;
  2179. }
  2180. if (scope_graph->Init() != SUCCESS) {
  2181. GELOGE(FAILED, "Scope graph init failed.");
  2182. return;
  2183. }
  2184. domi::tensorflow::NodeDef node_def;
  2185. node_def.set_name("OP");
  2186. FusionScopesResult *fusion_scope_rlt = new (std::nothrow) FusionScopesResult();
  2187. if (fusion_scope_rlt == nullptr) {
  2188. GELOGE(FAILED, "FusionScopesResult make shared failed.");
  2189. return;
  2190. }
  2191. fusion_scope_rlt->Init();
  2192. fusion_scope_rlt->SetName("OP");
  2193. auto &impl_scope_graph = scope_graph->impl_;
  2194. std::string scope_name = fusion_scope_rlt->Name();
  2195. impl_scope_graph->fusion_results_.insert(std::make_pair(scope_name, fusion_scope_rlt));
  2196. std::vector<ge::OperatorPtr> nodes;
  2197. ge::OperatorPtr op = ge::parser::MakeShared<ge::Operator>("op_name", "op_type");
  2198. if (op == nullptr) {
  2199. GELOGE(FAILED, "Operator make shared failed.");
  2200. return;
  2201. }
  2202. nodes.push_back(op);
  2203. fusion_scope_rlt->impl_->AddNodes(nodes);
  2204. ge::OpDescPtr opDesc = std::make_shared<ge::OpDesc>();
  2205. ge::TensorFlowModelParser tf_model_parser;
  2206. Status ret = tf_model_parser.RecordFusionResult(scope_graph, &node_def, opDesc);
  2207. EXPECT_EQ(SUCCESS, ret);
  2208. }
  2209. TEST_F(STestTensorflowParser, Tensorflow_UpdateFusionOpContext_test)
  2210. {
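// Build a small BatchNorm-style graph and verify UpdateFusionOpContext succeeds for a
// non-scope-pass fusion node.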
  2211. ModelParserFactory* factory = ModelParserFactory::Instance();
  2212. shared_ptr<domi::ModelParser> model_parser = factory->CreateModelParser(domi::TENSORFLOW);
  2213. TensorFlowModelParser tensorflow_parser;
  2214. ScopeFusionOpInfo info;
  2215. ge::OpNodeContext normal_op_node_context;
  2216. ge::OpNodeContext fusion_op_node_context;
2217. /* 1. Preconditions */
  2218. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2219. ScopePassManager passmanager;
  2220. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2221. NodeDef * node1 = graph->add_node();
  2222. node1->set_name("conv_conv5/BatchNorm/batchnorm/add");
  2223. node1->set_op("Add");
  2224. node1->add_input("conv_conv5/BatchNorm/moving_variance");
  2225. node1->add_input("conv_conv5/BatchNorm/batchnorm/add/y");
  2226. NodeDef * node2 = graph->add_node();
  2227. node2->set_name("conv_conv5/BatchNorm/moving_variance");
  2228. node2->set_op("Const");
  2229. NodeDef * node3 = graph->add_node();
  2230. node3->set_name("conv_conv5/BatchNorm/batchnorm/add/y");
  2231. node3->set_op("Const");
  2232. info.fusion_node_name = "conv_conv5/BatchNorm/batchnorm";
  2233. info.fusion_op_type = ge::parser::FUSIONBATCHNORM;
  2234. info.node_name = "conv_conv5/BatchNorm/batchnorm/add";
  2235. info.description = "";
  2236. info.scope_pass = false;
  2237. EXPECT_EQ(scope_graph->impl_->GetFusionScopesResults(nullptr), nullptr);
  2238. EXPECT_EQ(scope_graph->impl_->GetFusionScopesResults(node1), nullptr);
  2239. Status ret = tensorflow_parser.UpdateFusionOpContext(scope_graph, info, fusion_op_node_context, normal_op_node_context);
  2240. EXPECT_EQ(ret, domi::SUCCESS);
  2241. delete graph;
  2242. }
  2243. TEST_F(STestTensorflowParser, Tensorflow_GetInOutPutIndex_scope_pass)
  2244. {
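// Register a dynamic_rnn fusion scope result, then check FusionOpChildIgnore and the
// remapped indices returned by GetInPutIndex/GetOutPutIndex.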
  2245. ModelParserFactory* factory = ModelParserFactory::Instance();
  2246. shared_ptr<domi::ModelParser> model_parser = factory->CreateModelParser(domi::TENSORFLOW);
  2247. TensorFlowModelParser tensorflow_parser;
  2248. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2249. ScopePassManager passmanager;
  2250. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2251. FusionScopesResult* fusion_rlt = new FusionScopesResult();
  2252. fusion_rlt->Init();
  2253. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("fw/fw/ToInt32" ,{0}));
  2254. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("bw/bw/ToInt32" ,{0}));
  2255. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("bw/ReverseSequence" ,{0, 1}));
  2256. fusion_rlt->impl_->inputs_.insert(std::make_pair<string, vector<int32_t>>("bw/ReverseSequence" ,{1}));
  2257. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("concat" ,{0}));
  2258. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("fw/fw/while/Exit_3" ,{1}));
  2259. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("fw/fw/while/Exit_4" ,{2}));
  2260. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("bw/bw/while/Exit_3" ,{3}));
  2261. fusion_rlt->impl_->outputs_.insert(std::make_pair<string, vector<int32_t>>("bw/bw/while/Exit_4" ,{4}));
  2262. fusion_rlt->SetType("dynamic_rnn");
  2263. fusion_rlt->SetName("dynamic_rnn_node1");
  2264. scope_graph->impl_->AddFusionScopesResult(fusion_rlt);
  2265. ScopeFusionOpInfo info1;
  2266. info1.node_name = "fw/fw/ToInt32";
  2267. info1.fusion_node_name = "dynamic_rnn_node1";
  2268. info1.fusion_op_type = "dynamic_rnn";
  2269. info1.description = "";
  2270. info1.scope_pass = true;
  2271. bool ignore = false;
  2272. ignore = tensorflow_parser.FusionOpChildIgnore(scope_graph, info1);
2273. EXPECT_FALSE(ignore);
  2274. ScopeFusionOpInfo info2;
  2275. info2.node_name = "fw/fw/others";
  2276. info2.fusion_node_name = "dynamic_rnn_node1";
  2277. info2.fusion_op_type = "dynamic_rnn";
  2278. info2.description = "";
  2279. info2.scope_pass = true;
  2280. ignore = tensorflow_parser.FusionOpChildIgnore(scope_graph, info2);
2281. EXPECT_TRUE(ignore);
  2282. ScopeFusionOpInfo input_node_info;
  2283. input_node_info.node_name = "fw/fw/ToInt32";
  2284. input_node_info.fusion_node_name = "dynamic_rnn_node1";
  2285. input_node_info.fusion_op_type = "dynamic_rnn";
  2286. input_node_info.description = "";
  2287. input_node_info.scope_pass = true;
  2288. ScopeFusionOpInfo output_node_info;
  2289. output_node_info.node_name = "fw/fw/while/Exit_3";
  2290. output_node_info.fusion_node_name = "dynamic_rnn_node1";
  2291. output_node_info.fusion_op_type = "dynamic_rnn";
  2292. output_node_info.description = "";
  2293. output_node_info.scope_pass = true;
  2294. int32_t old_index = 0, new_index = -1;
  2295. Status ret = tensorflow_parser.GetInPutIndex(scope_graph, input_node_info, old_index, new_index);
  2296. EXPECT_EQ(domi::SUCCESS, ret);
2297. EXPECT_EQ(new_index, 0);
  2298. ret = tensorflow_parser.GetOutPutIndex(scope_graph, output_node_info, old_index, new_index);
  2299. EXPECT_EQ(domi::SUCCESS, ret);
2300. EXPECT_EQ(new_index, 1);
  2301. delete graph;
  2302. }
  2303. TEST_F(STestTensorflowParser, Tensorflow_AddFusionNodeDef_add_fusion_op_succ)
  2304. {
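// A fusion op registered in fusion_op_type_map_ gets a synthesized NodeDef whose op type
// equals the fusion type.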
  2305. ModelParserFactory* factory = ModelParserFactory::Instance();
  2306. shared_ptr<domi::ModelParser> model_parser = factory->CreateModelParser(domi::TENSORFLOW);
  2307. TensorFlowModelParser tensorflow_parser;
  2308. string fusion_op_name = "dropout";
  2309. string fusion_op_type = "Dropout";
  2310. string description = "test/dropout";
  2311. tensorflow_parser.fusion_op_type_map_[fusion_op_name].push_back(fusion_op_type);
  2312. tensorflow_parser.fusion_op_type_map_[fusion_op_name].push_back(description);
  2313. // op_node_context for fusion op
  2314. ge::OpNodeContext op_node_context;
  2315. op_node_context.input_map["pre_node_a"].push_back({0, 0});
  2316. op_node_context.input_map["pre_node_b"].push_back({0, 1});
  2317. tensorflow_parser.op_node_context_map_[fusion_op_name] = op_node_context;
  2318. // origin inner node def
  2319. NodeDef* node_def = new (std::nothrow) NodeDef();
  2320. node_def->set_name("scope_node_1");
  2321. node_def->set_op("Add");
  2322. tensorflow_parser.fusion_op_nodedef_map_[fusion_op_name].push_back(node_def);
  2323. ScopePassManager pass_manager;
  2324. tensorflow::GraphDef *graph = new (std::nothrow) tensorflow::GraphDef();
  2325. shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graph);
  2326. vector<string> node_name_list = {fusion_op_name};
  2327. Status ret = tensorflow_parser.AddFusionNodeDef(scope_graph, node_name_list);
  2328. EXPECT_EQ(ret, SUCCESS);
  2329. EXPECT_EQ(tensorflow_parser.nodedef_map_.size(), 1);
  2330. auto fusion_node_def = tensorflow_parser.nodedef_map_[fusion_op_name];
  2331. EXPECT_NE(fusion_node_def, nullptr);
  2332. EXPECT_EQ(fusion_node_def->op(), fusion_op_type);
  2333. delete node_def;
  2334. delete graph;
  2335. tensorflow_parser.DeleteFuisonNodeDef();
  2336. }
  2337. TEST_F(STestTensorflowParser, remain_dpop_node)
  2338. {
  2339. ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2340. ge::OpDescPtr op = std::make_shared<ge::OpDesc>("dpop_123", "FrameworkOp");
  2341. ge::NodePtr node = std::make_shared<ge::Node>(op, graph);
  2342. graph->AddNode(node);
  2343. ModelParserFactory* factory = ModelParserFactory::Instance();
  2344. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2345. ASSERT_TRUE(NULL != model_parser);
  2346. TensorFlowModelParser tensorflow_parser;
  2347. Status ret = tensorflow_parser.RemoveIsolateNode(graph);
  2348. EXPECT_EQ(domi::SUCCESS, ret);
  2349. }
  2350. TEST_F(STestTensorflowParser, tensorflow_UpdateEdgesControlInfo_test)
  2351. {
  2352. TensorFlowModelParser model_parser;
  2353. ge::ScopeFusionOpInfo info;
  2354. info.fusion_node_name = "conv_conv5/BatchNorm/batchnorm";
  2355. info.fusion_op_type = ge::parser::FUSIONBATCHNORM;
  2356. info.node_name = "conv_conv5/BatchNorm/batchnorm/add";
  2357. info.description = "";
  2358. info.scope_pass = false;
  2359. model_parser.UpdateEdgesControlInfo(info);
  2360. }
  2361. TEST_F(STestTensorflowParser, tensorflow_OptimizeSnapShot_test)
  2362. {
  2363. TensorFlowModelParser model_parser;
2364. tensorflow::NodeDef *curr_node_def = initNodeDef();
  2365. std::map<string, NodeDef *> nodedef_map;
2366. nodedef_map.emplace("pre_node_a", curr_node_def);
  2367. std::pair<string, int> input_data;
  2368. std::vector<string> control_list;
  2369. std::string curr_node_name = "pre_node_a";
  2370. GenOriginContext(&model_parser, curr_node_name);
2371. Status ret = model_parser.OptimizeSnapShot(curr_node_def, nodedef_map, input_data, control_list);
  2372. EXPECT_EQ(ret, INTERNAL_ERROR);
2373. curr_node_def->set_name("pre_node_a");
  2374. GenOriginContext(&model_parser, curr_node_name);
2375. ret = model_parser.OptimizeSnapShot(curr_node_def, nodedef_map, input_data, control_list);
  2376. EXPECT_EQ(ret, SUCCESS);
  2377. }
  2378. TEST_F(STestTensorflowParser, tensorflow_GraphDefOptimizeSnapShot_test)
  2379. {
  2380. TensorFlowModelParser model_parser;
  2381. tensorflow::GraphDef graph_def;
2382. tensorflow::NodeDef *curr_node_def = initNodeDef();
  2383. std::map<string, NodeDef *> nodedef_map;
2384. nodedef_map.emplace("pre_node_a", curr_node_def);
  2385. std::vector<NodeDef *> nodedef_to_optimize;
2386. nodedef_to_optimize.emplace_back(curr_node_def);
  2387. Status ret = model_parser.GraphDefOptimizeSnapShot(&graph_def, nodedef_map, nodedef_to_optimize);
  2388. EXPECT_EQ(ret, FAILED);
  2389. }
  2390. TEST_F(STestTensorflowParser, tensorflow_SetDestNodeName_test)
  2391. {
  2392. TensorFlowModelParser model_parser;
  2393. GraphDef graph;
  2394. auto arg0 = AddNode(graph, "_Arg", "arg0");
  2395. auto identity0 = AddNode(graph, "Identity", "identity0");
  2396. auto add0 = AddNode(graph, "Add", "add0");
  2397. int32_t input_idx = 0;
  2398. bool is_control = true;
  2399. bool clear_input_flag = true;
  2400. AddInput(arg0, identity0, 0);
  2401. AddInput(identity0, add0, 0);
  2402. Status ret = model_parser.SetDestNodeName(identity0, add0, input_idx, is_control, clear_input_flag);
  2403. EXPECT_EQ(ret, SUCCESS);
  2404. }
  2405. TEST_F(STestTensorflowParser, tensorflow_OptimizeDestroyTemporaryVariable_test)
  2406. {
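// The TemporaryVariable and DestroyTemporaryVariable nodes carry different var names, so
// GraphDefOptimize is expected to fail and the no-input node count stays unchanged.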
  2407. ModelParserFactory* factory = ModelParserFactory::Instance();
  2408. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2409. TensorFlowModelParser tensorflow_parser;
  2410. GraphDef graph;
  2411. auto const0 = AddNode(graph, "Const", "Const0");
  2412. auto tmpVar0 = AddNode(graph, "TemporaryVariable", "TemporaryVariable0");
  2413. auto assign0 = AddNode(graph, "Assign", "Assign0");
  2414. auto destroy0 = AddNode(graph, "DestroyTemporaryVariable", "DestroyTemporaryVariable0");
  2415. auto add0 = AddNode(graph, "Add", "Add0");
  2416. google::protobuf::Map< std::string, tensorflow::AttrValue> *node_attr_map = tmpVar0->mutable_attr();
  2417. tensorflow::AttrValue var_name_attr_value;
  2418. var_name_attr_value.set_s("temporary_variable_name");
  2419. (*node_attr_map)[ge::VAR_ATTR_NAME] = var_name_attr_value;
  2420. google::protobuf::Map<std::string, tensorflow::AttrValue>* node_attr_map_destroy = destroy0->mutable_attr();
  2421. tensorflow::AttrValue var_name_attr_value_destroy;
  2422. var_name_attr_value_destroy.set_s("destroy_temporary_variable_name");
  2423. (*node_attr_map_destroy)[ge::VAR_ATTR_NAME] = var_name_attr_value_destroy;
  2424. AddInput(tmpVar0, assign0, 0);
  2425. AddInput(assign0, destroy0, 0);
  2426. AddInput(const0, add0, 0);
  2427. AddInput(destroy0, add0, 1);
  2428. GraphDef* graphDef = &graph;
  2429. int32_t no_input_node_size_original = 0;
  2430. for (int w = 0; w < graphDef->node_size(); w++) {
  2431. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2432. if (nodeTmp->input_size() == 0) {
  2433. no_input_node_size_original++;
  2434. }
  2435. }
  2436. Status ret = tensorflow_parser.GraphDefOptimize(graphDef);
  2437. int32_t no_input_node_size_result = 0;
  2438. for (int w = 0; w < graphDef->node_size(); w++) {
  2439. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2440. if (nodeTmp->input_size() == 0) {
  2441. no_input_node_size_result ++;
  2442. }
  2443. }
  2444. ASSERT_EQ(ret, domi::FAILED);
  2445. ASSERT_EQ(no_input_node_size_original, no_input_node_size_result);
  2446. }
  2447. TEST_F(STestTensorflowParser, tensorflow_OptimizeDestroyTemporaryVariable_test2)
  2448. {
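// With matching var names the optimization is expected to succeed, leaving one additional
// node without inputs in the graph.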
  2449. ModelParserFactory* factory = ModelParserFactory::Instance();
  2450. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2451. TensorFlowModelParser tensorflow_parser;
  2452. GraphDef graph;
  2453. auto const0 = AddNode(graph, "Const", "Const0");
  2454. auto tmpVar0 = AddNode(graph, "TemporaryVariable", "TemporaryVariable0");
  2455. auto assign0 = AddNode(graph, "Assign", "Assign0");
  2456. auto destroy0 = AddNode(graph, "DestroyTemporaryVariable", "DestroyTemporaryVariable0");
  2457. auto add0 = AddNode(graph, "Add", "Add0");
  2458. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map = tmpVar0->mutable_attr();
  2459. tensorflow::AttrValue var_name_attr_value;
  2460. var_name_attr_value.set_s("temporary_variable_name");
  2461. (*node_attr_map)[ge::VAR_ATTR_NAME] = var_name_attr_value;
  2462. google::protobuf::Map<std::string, tensorflow::AttrValue> *node_attr_map_destroy = destroy0->mutable_attr();
  2463. tensorflow::AttrValue var_name_attr_value_destroy;
  2464. var_name_attr_value_destroy.set_s("temporary_variable_name");
  2465. (*node_attr_map_destroy)[ge::VAR_ATTR_NAME] = var_name_attr_value_destroy;
  2466. AddInput(tmpVar0, assign0, 0);
  2467. AddInput(assign0, destroy0, 0);
  2468. AddInput(const0, add0, 0);
  2469. AddInput(destroy0, add0, 1);
  2470. GraphDef* graphDef = &graph;
  2471. int32_t no_input_node_size_original = 0;
  2472. for (int w = 0; w < graphDef->node_size(); w++) {
  2473. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2474. if (nodeTmp->input_size() == 0) {
  2475. no_input_node_size_original ++;
  2476. }
  2477. }
  2478. Status ret = tensorflow_parser.GraphDefOptimize(graphDef);
  2479. int32_t no_input_node_size_result = 0;
  2480. for (int w = 0; w < graphDef->node_size(); w++) {
  2481. tensorflow::NodeDef* nodeTmp = graphDef->mutable_node(w);
  2482. if (nodeTmp->input_size() == 0) {
  2483. no_input_node_size_result ++;
  2484. }
  2485. }
  2486. ASSERT_EQ(ret, domi::SUCCESS);
  2487. ASSERT_EQ(no_input_node_size_original, (no_input_node_size_result - 1));
  2488. }
  2489. TEST_F(STestTensorflowParser, tensorflow_AddControlEdgeAfterRemoveInputs_test)
  2490. {
  2491. tensorflow::GraphDef graph_def;
  2492. TensorFlowModelParser tensorflow_parser;
  2493. tensorflow::NodeDef *node_def = initNodeDef();
  2494. node_def->set_name("Add0");
  2495. node_def->set_op("add");
  2496. std::map<std::string, NodeDef *> all_node_map;
  2497. all_node_map.emplace("Add0", node_def);
  2498. std::vector<std::string> removed_inputs_vec;
  2499. removed_inputs_vec.emplace_back("Add0");
  2500. Status ret = tensorflow_parser.AddControlEdgeAfterRemoveInputs(&graph_def, node_def, all_node_map, removed_inputs_vec);
  2501. EXPECT_EQ(ret, SUCCESS);
2502. tensorflow::NodeDef *node_switch = initNodeDef();
2503. node_switch->set_name("switch_op");
  2504. node_swith->set_op(parser::SWITCH);
2505. all_node_map.emplace("switch_op", node_switch);
  2506. removed_inputs_vec.clear();
  2507. removed_inputs_vec.emplace_back("switch_op");
2508. ret = tensorflow_parser.AddControlEdgeAfterRemoveInputs(&graph_def, node_switch, all_node_map, removed_inputs_vec);
  2509. EXPECT_EQ(ret, SUCCESS);
  2510. }
  2511. TEST_F(STestTensorflowParser, tensorflow_optimizer_snapshot_no_retval_test) {
  2512. std::string caseDir = __FILE__;
  2513. std::size_t idx = caseDir.find_last_of("/");
  2514. caseDir = caseDir.substr(0, idx);
  2515. const std::string root_proto = caseDir + "/origin_models/test_snapshot.pb";
  2516. domi::tensorflow::GraphDef graphDef;
  2517. bool protoRet =
  2518. parser::ReadProtoFromBinaryFile(root_proto.c_str(), &graphDef);
  2519. ASSERT_EQ(protoRet, true);
  2520. TensorFlowModelParser tensorflow_parser;
  2521. ge::ComputeGraphPtr root_graph =
  2522. ge::parser::MakeShared<ge::ComputeGraph>("tmp_graph");
  2523. Status ret = tensorflow_parser.ParseProto(
  2524. reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  2525. EXPECT_EQ(FAILED, ret);
  2526. }
  2527. TEST_F(STestTensorflowParser, tensorflow_RemoveInputs_test)
  2528. {
  2529. tensorflow::GraphDef graph_def;
  2530. tensorflow::NodeDef *node_def = initNodeDef();
  2531. node_def->set_name("OP");
  2532. node_def->add_input("OP/Input_1");
  2533. node_def->add_input("OP/Input_2");
  2534. std::set<uint32_t> remove_index_set;
  2535. std::map<std::string, NodeDef *> all_node_map;
  2536. TensorFlowModelParser model_parser;
  2537. Status ret = model_parser.RemoveInputs(&graph_def, node_def, remove_index_set, all_node_map);
  2538. EXPECT_EQ(ret, SUCCESS);
  2539. remove_index_set.emplace(0);
  2540. ret = model_parser.RemoveInputs(&graph_def, node_def, remove_index_set, all_node_map);
  2541. EXPECT_EQ(ret, FAILED);
  2542. }
  2543. TEST_F(STestTensorflowParser, tensorflow_UpdateInnerNodeContext_test)
  2544. {
  2545. std::string fusion_op_name = "post_node_a";
  2546. std::vector<std::string> inner_nodes_name;
  2547. inner_nodes_name.emplace_back("post_node_a");
  2548. TensorFlowModelParser model_parser;
  2549. Status ret = model_parser.UpdateInnerNodeContext(fusion_op_name, inner_nodes_name);
  2550. EXPECT_EQ(ret, INTERNAL_ERROR);
  2551. GenOriginContext(&model_parser, fusion_op_name);
  2552. ret = model_parser.UpdateInnerNodeContext(fusion_op_name, inner_nodes_name);
  2553. EXPECT_EQ(ret, SUCCESS);
  2554. }
  2555. TEST_F(STestTensorflowParser, tensorflow_UpdateInnerInputMap_test)
  2556. {
  2557. string fusion_op_name = "post_node_a";
  2558. OpNodeContext fusion_context;
  2559. std::vector<std::string> inner_nodes_name;
  2560. inner_nodes_name.emplace_back("post_node_a");
  2561. std::set<string> fusion_input_nodes;
  2562. fusion_input_nodes.insert("post_node_a");
  2563. TensorFlowModelParser model_parser;
  2564. GenOriginContext(&model_parser, fusion_op_name);
  2565. model_parser.UpdateInnerInputMap(fusion_op_name, fusion_context, inner_nodes_name, fusion_input_nodes);
  2566. }
  2567. TEST_F(STestTensorflowParser, tensorflow_UpdateInnerOutputMap_test)
  2568. {
  2569. string fusion_op_name = "post_node_a";
  2570. OpNodeContext fusion_context;
  2571. std::vector<std::string> inner_nodes_name;
  2572. inner_nodes_name.emplace_back("post_node_a");
  2573. std::set<string> fusion_output_nodes;
  2574. fusion_output_nodes.insert("post_node_a");
  2575. TensorFlowModelParser model_parser;
  2576. GenOriginContext(&model_parser, fusion_op_name);
  2577. model_parser.UpdateInnerOutputMap(fusion_op_name, fusion_context, inner_nodes_name, fusion_output_nodes);
  2578. }
  2579. TEST_F(STestTensorflowParser, tensorflow_ScopePassManager_AddPass_test)
  2580. {
  2581. ScopePassManager passmanager;
  2582. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2583. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2584. unique_ptr<ScopeBasePass> pass;
  2585. pass.reset(new ScopeTestPass());
  2586. EXPECT_EQ(ge::SUCCESS, passmanager.AddPass(pass));
  2587. EXPECT_NE(ge::SUCCESS, passmanager.Run(scope_graph));
  2588. delete graph;
  2589. graph = nullptr;
  2590. }
  2591. TEST_F(STestTensorflowParser, tensorflow_CheckAttrHasType_test1)
  2592. {
  2593. tensorflow::AttrValue attr_value;
  2594. attr_value.mutable_list();
  2595. Status ret = TensorFlowUtil::CheckAttrHasType(attr_value, "int");
  2596. EXPECT_EQ(FAILED, ret);
  2597. attr_value.set_type(DT_INVALID);
  2598. ret = TensorFlowUtil::CheckAttrHasType(attr_value, "type");
  2599. EXPECT_EQ(FAILED, ret);
  2600. tensorflow::AttrValue attr_value2;
  2601. AttrValue_ListValue *list = attr_value2.mutable_list();
  2602. list->add_type(tensorflow::DT_FLOAT);
  2603. list->add_type((tensorflow::DataType)30);
  2604. ret = TensorFlowUtil::CheckAttrHasType(attr_value2, "list(type)");
  2605. EXPECT_EQ(FAILED, ret);
  2606. }
  2607. TEST_F(STestTensorflowParser, tensorflow_CheckAttrHasType_test2)
  2608. {
  2609. tensorflow::AttrValue attr_value;
  2610. AttrValue_ListValue * list = attr_value.mutable_list();
  2611. list->add_type(tensorflow::DT_FLOAT);
  2612. list->add_type(tensorflow::DT_INVALID);
  2613. Status ret = TensorFlowUtil::CheckAttrHasType(attr_value, "list(type)");
  2614. EXPECT_EQ(FAILED, ret);
  2615. attr_value.set_placeholder("test");
  2616. ret = TensorFlowUtil::CheckAttrHasType(attr_value, "");
  2617. EXPECT_EQ(FAILED, ret);
  2618. }
  2619. TEST_F(STestTensorflowParser, tensorflow_TransTensorDescriptor_test)
  2620. {
  2621. tensorflow::AttrValue attr_value;
  2622. AttrValue_ListValue *list = attr_value.mutable_list();
  2623. list->add_type(tensorflow::DT_FLOAT);
  2624. ParserOperator op;
  2625. uint32_t io = TENSORFLOW_NORMAL_INPUT_TENSOR_FLAG;
  2626. std::string type = ge::parser::FUSEDBATCHNORMGRAD;
  2627. Status ret = TensorFlowUtil::TransTensorDescriptor(attr_value, &op, io, type);
  2628. EXPECT_EQ(ret, SUCCESS);
  2629. io = TENSORFLOW_NORMAL_OUTPUT_TENSOR_FLAG;
  2630. ret = TensorFlowUtil::TransTensorDescriptor(attr_value, &op, io, type);
  2631. EXPECT_EQ(ret, SUCCESS);
  2632. }
  2633. TEST_F(STestTensorflowParser, tensorflow_GraphDefOptimizeDestroyTemporaryVariable_test)
  2634. {
  2635. tensorflow::GraphDef *graph_def = nullptr;
  2636. tensorflow::NodeDef *nodeCurrent = initNodeDef();
  2637. TensorFlowModelParser model_parser;
  2638. Status ret = model_parser.GraphDefOptimizeDestroyTemporaryVariable(graph_def, nodeCurrent);
  2639. EXPECT_EQ(ret, FAILED);
  2640. }
  2641. TEST_F(STestTensorflowParser, tensorflow_GetFunctionProto_test)
  2642. {
  2643. std::cout << __FILE__ << std::endl;
  2644. std::string caseDir = __FILE__;
  2645. std::size_t idx = caseDir.find_last_of("/");
  2646. caseDir = caseDir.substr(0, idx);
  2647. std::string file = caseDir + "/origin_models/test_enter.pb";
  2648. domi::tensorflow::GraphDefLibrary graph_def_library;
  2649. TensorFlowModelParser model_parser;
  2650. Status ret = model_parser.GetFunctionProto(file, graph_def_library);
  2651. EXPECT_EQ(ret, FAILED);
  2652. }
  2653. TEST_F(STestTensorflowParser, tensorflow_GetNodeFormat_test)
  2654. {
  2655. NodeDef *node_def1 = initNodeDef();
  2656. node_def1->set_op("NoOp");
  2657. node_def1->set_name("NoOp");
  2658. NodeDef *node_def2 = initNodeDef();
  2659. node_def2->set_op("Add");
  2660. node_def2->set_name("Add0");
  2661. TfTranspose pred_transpose = TO_NCHW;
  2662. domiTensorFormat_t format = domi::DOMI_TENSOR_NC1HWC0;
  2663. std::set<const NodeDef *> visited_node;
  2664. visited_node.emplace(node_def2);
  2665. TensorFlowModelParser model_parser;
  2666. Status ret = model_parser.GetNodeFormat(node_def1, pred_transpose, format, visited_node);
  2667. EXPECT_EQ(ret, FAILED);
  2668. delete node_def1;
  2669. delete node_def2;
  2670. }
  2671. TEST_F(STestTensorflowParser, tensorflow_GetFormatTranspose_test)
  2672. {
  2673. NodeDef *transpose_node = initNodeDef();
  2674. transpose_node->set_op("Transpose");
  2675. TfTranspose transpose_direc = NO_TRANSPOSE;
  2676. TensorFlowModelParser modelParser;
  2677. Status ret = modelParser.GetFormatTranspose(transpose_node, transpose_direc);
  2678. EXPECT_EQ(ret, FAILED);
  2679. delete transpose_node;
  2680. }
  2681. TEST_F(STestTensorflowParser, tensorflow_GetFormatTranspose_test2)
  2682. {
  2683. TensorFlowModelParser modelParser;
  2684. TfTranspose transpose_direc = NO_TRANSPOSE;
  2685. NodeDef *transpose_node = initNodeDef();
  2686. GraphDef graph;
  2687. auto arg0 = AddNode(graph, "_Arg", "arg0");
  2688. auto snapshot0 = AddNode(graph, "Snapshot", "snapshot0");
  2689. auto ret0 = AddNode(graph, "_Retval", "retval0");
  2690. auto arg1 = AddNode(graph, "_Arg", "arg1");
  2691. auto snapshot1 = AddNode(graph, "Snapshot", "snapshot1");
  2692. auto ret1 = AddNode(graph, TENSORFLOWF_NODE_OP_TRANSPOSE, "retval1");
  2693. auto arg2 = AddNode(graph, "_Arg", "arg2");
  2694. auto snapshot2 = AddNode(graph, "Snapshot", "snapshot2");
  2695. auto ret2 = AddNode(graph, TENSORFLOWF_NODE_OP_TRANSPOSE, TENSORFLOWF_NODE_OP_TRANSPOSE);
  2696. AddInput(arg0, snapshot0, 0);
  2697. AddInput(snapshot0, ret0, 0);
  2698. AddInput(arg1, snapshot1, 0);
  2699. AddInput(snapshot1, ret1, 0);
  2700. AddInput(arg2, snapshot2, 0);
  2701. AddInput(snapshot2, ret2, 0);
  2702. AddInput(snapshot0, snapshot1, -1);
  2703. AddInput(snapshot1, snapshot2, -1);
  2704. bool train_flag = ge::GetParserContext().train_flag;
  2705. ge::GetParserContext().train_flag = true;
  2706. ASSERT_EQ(modelParser.GraphDefOptimize(&graph), SUCCESS);
  2707. ge::GetParserContext().train_flag = train_flag;
  2708. modelParser.nodedef_map_["arg1"] = transpose_node;
  2709. modelParser.nodedef_map_["^arg0"] = transpose_node;
  2710. Status ret = modelParser.GetFormatTranspose(ret1, transpose_direc);
  2711. EXPECT_EQ(ret, SUCCESS);
  2712. delete transpose_node;
  2713. }
  2714. TEST_F(STestTensorflowParser, tensorflow_GetTensorflowGraphInOutMap_test)
  2715. {
  2716. TensorFlowModelParser model_parser;
  2717. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2718. tensorflow::NodeDef *node_input = graph->add_node();
  2719. node_input->set_name("name_input");
  2720. node_input->set_op("op_input");
  2721. AddGraphNode(graph, "t_lstm/t_lstm_cell/Sigmoid5", "Sigmoid", "node_input");
  2722. AddGraphNode(graph, "t_lstm/t_lstm_cell/Sigmoid6", "Sigmoid", "node_input");
  2723. AddGraphNode(graph, "t_lstm/t_lstm_cell/Sigmoid7", "Sigmoid", "node_input");
  2724. AddGraphNode(graph, "t_lstm/t_lstm_cell/Mul5", "Mul", "node_input");
  2725. AddGraphNode(graph, "t_lstm/t_lstm_cell/Mul6", "Mul", "node_input");
  2726. AddGraphNode(graph, "t_lstm/t_lstm_cell/Mul7", "Mul", "node_input");
  2727. AddGraphNode(graph, "t_lstm/t_lstm_cell/Relu5", "Relu", "node_input");
  2728. AddGraphNode(graph, "t_lstm/t_lstm_cell/Relu6", "Relu", "node_input");
  2729. Status ret = model_parser.GetTensorflowGraphInOutMap(graph);
  2730. EXPECT_EQ(ret, SUCCESS);
  2731. delete graph;
  2732. }
  2733. TEST_F(STestTensorflowParser, tensorflow_RemoveIsolateNode_test)
  2734. {
  2735. TensorFlowModelParser model_parser;
  2736. tensorflow::GraphDef graph;
  2737. CreateGraphDef(graph);
  2738. Status ret = model_parser.RemoveIsolateNode(&graph);
  2739. EXPECT_EQ(ret, FAILED);
  2740. }
  2741. TEST_F(STestTensorflowParser, tensorflow_AddNodeToGraphAndMarkFormat_test)
  2742. {
  2743. TensorFlowModelParser model_parser;
  2744. ComputeGraphPtr graph = make_shared<ge::ComputeGraph>("default");
  2745. std::vector<std::string> op_node_name_list = {"Const", "placeholder0"};
  2746. GenOriginNodeDef(&model_parser, op_node_name_list);
  2747. Status ret = model_parser.AddNodeToGraphAndMarkFormat(graph, op_node_name_list);
  2748. EXPECT_EQ(ret, INTERNAL_ERROR);
  2749. }
  2750. TEST_F(STestTensorflowParser, tensorflow_ParserNodeDef1_test)
  2751. {
  2752. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2753. ModelParserFactory* factory = ModelParserFactory::Instance();
  2754. shared_ptr<ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2755. ASSERT_TRUE(NULL != model_parser);
  2756. TensorFlowModelParser tensorflow_parser;
  2757. tensorflow_parser.adaptedOpTypeMap_["test_name"] = "POOLING";
  2758. std::mutex graphMutex;
  2759. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2760. ScopePassManager passmanager;
  2761. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2762. domi::tensorflow::NodeDef node_def;
  2763. node_def.set_name("test_name");
  2764. node_def.set_op("POOLING");
  2765. error_message::Context error_context;
  2766. Status ret = ge::TensorFlowModelParser::ParseNodeDef(&tensorflow_parser, compute_graph, &graphMutex, scope_graph, &node_def, error_context);
  2767. EXPECT_EQ(FAILED, ret);
  2768. delete graph;
  2769. }
  2770. TEST_F(STestTensorflowParser, tensorflow_ParserNodeDef2_test)
  2771. {
  2772. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2773. ModelParserFactory* factory = ModelParserFactory::Instance();
  2774. shared_ptr<ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2775. ASSERT_TRUE(NULL != model_parser);
  2776. TensorFlowModelParser tensorflow_parser;
  2777. tensorflow_parser.adaptedOpTypeMap_["Pooling"] = "Pooling";
  2778. std::mutex graphMutex;
  2779. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2780. ScopePassManager passmanager;
  2781. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2782. REGISTER_CUSTOM_OP("Pooling")
  2783. .FrameworkType(domi::TENSORFLOW)
  2784. .OriginOpType("Pooling")
  2785. .ParseParamsFn(ParseParams)
  2786. .ImplyType(ImplyType::TVM);
  2787. register_tbe_op();
  2788. domi::tensorflow::NodeDef node_def;
  2789. node_def.set_name("Pooling");
  2790. node_def.set_op("Pooling");
  2791. error_message::Context error_context;
  2792. Status ret = ge::TensorFlowModelParser::ParseNodeDef(&tensorflow_parser, compute_graph, &graphMutex, scope_graph, &node_def, error_context);
  2793. EXPECT_EQ(FAILED, ret);
  2794. delete graph;
  2795. }
  2796. TEST_F(STestTensorflowParser, tensorflow_AddExternalGraph_test)
  2797. {
  2798. TensorFlowModelParser modelParser;
  2799. ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  2800. std::string inputNodeType = "DATA";
  2801. MakeDagGraph(subGraph, inputNodeType);
  2802. Status ret = modelParser.AddExternalGraph(subGraph);
  2803. EXPECT_EQ(ret, SUCCESS);
  2804. }
  2805. TEST_F(STestTensorflowParser, tensorflow_AddFmkNode_test)
  2806. {
  2807. TensorFlowModelParser model_parser;
  2808. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2809. tensorflow::GraphDef *graphDef = new (std::nothrow) tensorflow::GraphDef();
  2810. ScopePassManager pass_manager;
  2811. std::shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graphDef);
  2812. std::vector<std::string> op_node_name_list = {"Const", "placeholder0"};
  2813. GenOriginNodeDef(&model_parser, op_node_name_list);
  2814. Status ret = model_parser.AddFmkNode(compute_graph, scope_graph, op_node_name_list, false);
  2815. EXPECT_EQ(ret, PARAM_INVALID);
  2816. delete graphDef;
  2817. }
  2818. TEST_F(STestTensorflowParser, tensorflow_OptimizeConstNodes4CustomOp_test)
  2819. {
  2820. TensorFlowModelParser model_parser;
  2821. tensorflow::GraphDef graph_def;
  2822. CreateGraphDef(graph_def);
  2823. Status ret = model_parser.OptimizeConstNodes4CustomOp(&graph_def);
  2824. EXPECT_EQ(ret, SUCCESS);
  2825. }
  2826. TEST_F(STestTensorflowParser, OptimizeConstNodes4CustomOp_success)
  2827. {
  2828. GraphDef graph;
  2829. auto bn = AddNode(graph, "FusedBatchNormV3", "FusedBatchNormV3_0");
  2830. auto bn_grad = AddNode(graph, "FusedBatchNormGradV3", "FusedBatchNormGradV3_0");
  2831. AddInput(bn, bn_grad, 0);
  2832. AddInput(bn, bn_grad, 1);
  2833. AddInput(bn, bn_grad, 2);
  2834. AddInput(bn, bn_grad, 3);
  2835. AddInput(bn, bn_grad, 5);
  2836. AddInput(bn, bn_grad, 5);
  2837. GraphDef* graphDef = &graph;
  2838. int before_bn_grad_input_size = bn_grad->input_size();
  2839. ASSERT_EQ(before_bn_grad_input_size, 6);
  2840. ModelParserFactory* factory = ModelParserFactory::Instance();
  2841. shared_ptr<domi::ModelParser> model_parser= factory->CreateModelParser(domi::TENSORFLOW);
  2842. ge::TensorFlowModelParser tensorflow_parser;
  2843. Status ret = tensorflow_parser.OptimizeConstNodes4CustomOp(graphDef);
  2844. int after_bn_grad_input_size = bn_grad->input_size();
  2845. ASSERT_EQ(after_bn_grad_input_size, 6);
  2846. ASSERT_EQ(ret, domi::SUCCESS);
  2847. REGISTER_CUSTOM_OP("BatchNormGrad")
  2848. .FrameworkType(domi::TENSORFLOW)
  2849. .OriginOpType({"FusedBatchNormGradV3", "FusedBatchNormGradV2", "FusedBatchNormGrad"})
  2850. .ParseParamsFn(AutoMappingFn)
  2851. .DelInputWithOriginalType(5, "FusedBatchNormGradV3")
  2852. .ImplyType(ImplyType::TVM);
  2853. register_tbe_op();
  2854. ret = tensorflow_parser.OptimizeConstNodes4CustomOp(graphDef);
  2855. after_bn_grad_input_size = bn_grad->input_size();
  2856. ASSERT_EQ(after_bn_grad_input_size, 6);
  2857. ASSERT_EQ(ret, domi::SUCCESS);
  2858. }
  2859. TEST_F(STestTensorflowParser, tensorflow_ParseOpParams_test)
  2860. {
  2861. TensorFlowModelParser model_parser;
  2862. tensorflow::NodeDef *node_def = initNodeDef();
  2863. node_def->set_name("Pooling");
  2864. node_def->set_op("Pooling");
  2865. ge::OpDescPtr op = std::make_shared<ge::OpDesc>();
  2866. std::shared_ptr<OpParserFactory> factory = OpParserFactory::Instance(domi::TENSORFLOW);
  2867. std::shared_ptr<OpParser> op_parser = factory->CreateOpParser("Pooling");
  2868. Status ret = model_parser.ParseOpParams(node_def, op, op_parser);
  2869. EXPECT_EQ(ret, FAILED);
  2870. node_def->set_name("TensorArrayWrite");
  2871. node_def->set_op("TensorArrayWriteV3");
  2872. op_parser = factory->CreateOpParser("TensorArrayWrite");
  2873. ret = model_parser.ParseOpParams(node_def, op, op_parser);
  2874. EXPECT_EQ(ret, SUCCESS);
  2875. delete node_def;
  2876. }
  2877. TEST_F(STestTensorflowParser, tensorflow_AddFusionInnerNodeDef_test)
  2878. {
  2879. TensorFlowModelParser model_parser;
  2880. ge::ComputeGraphPtr compute_graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  2881. tensorflow::GraphDef *graphDef = new (std::nothrow) tensorflow::GraphDef();
  2882. ScopePassManager pass_manager;
  2883. std::shared_ptr<ScopeGraph> scope_graph = pass_manager.BuildScopeGraph(graphDef);
  2884. std::vector<std::string> op_node_name_list = {"Const", "placeholder0"};
  2885. FusionScopesResult *fusion_scope_rlt = new (std::nothrow) FusionScopesResult();
  2886. fusion_scope_rlt->Init();
  2887. fusion_scope_rlt->SetName("FusionCustom");
  2888. auto &impl_scope_graph = scope_graph->impl_;
  2889. std::string scope_name = fusion_scope_rlt->Name();
  2890. impl_scope_graph->fusion_results_.insert(std::make_pair(scope_name, fusion_scope_rlt));
  2891. std::string fusion_op_name = "FusionCustom";
  2892. GenOriginNodeDef(&model_parser, op_node_name_list);
  2893. GenFusionScopesResult(scope_graph, fusion_scope_rlt, fusion_op_name);
  2894. Status ret = model_parser.AddFusionInnerNodeDef(scope_graph, fusion_op_name, op_node_name_list);
  2895. EXPECT_EQ(ret, INTERNAL_ERROR);
  2896. delete graphDef;
  2897. }
  2898. TEST_F(STestTensorflowParser, Scope_pass_test)
  2899. {
  2900. ScopePassManager passmanager;
  2901. tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  2902. shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  2903. EXPECT_NE(nullptr, scope_graph);
  2904. unique_ptr<ScopeBasePass> pass;
  2905. pass.reset(new ScopeTestPass());
  2906. EXPECT_EQ(domi::SUCCESS, passmanager.AddPass(pass));
  2907. scope_graph = passmanager.BuildScopeGraph(graph);
  2908. EXPECT_NE(nullptr, scope_graph);
  2909. delete graph;
  2910. }
  2911. TEST_F(STestTensorflowParser, operator_attr_set_and_get)
  2912. {
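// Exercise ParserOperator attribute setters/getters for scalar and tuple types, then
// round-trip through ConvertToOpDesc/ConvertFromOpDesc, including the nullptr failure paths.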
  2913. TestOperator test_operator;
  2914. test_operator.Name("test_op");
  2915. EXPECT_EQ("test_op" , test_operator.GetName());
  2916. test_operator.Input(test_operator, 0);
  2917. test_operator.Input(test_operator, 1);
  2918. test_operator.GetOpAttrs();
  2919. int64_t pad = 1;
  2920. test_operator.Attr("pad", pad);
  2921. EXPECT_EQ(pad , test_operator.GetIntAttr("pad"));
  2922. bool bool_value = true;
  2923. test_operator.Attr("bool_value", bool_value);
  2924. EXPECT_EQ(bool_value , test_operator.GetBoolAttr("bool_value"));
2925. float float_value = 1.0f;
  2926. test_operator.Attr("float_value", float_value);
  2927. EXPECT_EQ(float_value , test_operator.GetFloatAttr("float_value"));
  2928. std::string str_value = "test_string";
  2929. test_operator.Attr("str_value", str_value);
  2930. EXPECT_EQ(str_value , test_operator.GetStringAttr("str_value"));
  2931. BoolTuple boollist_value{true, false};
  2932. test_operator.Attr("boollist_value", boollist_value);
  2933. BoolTuple get_boollist_value = test_operator.GetBoolTupleAttr("boollist_value");
  2934. EXPECT_EQ(boollist_value[0] , get_boollist_value[0]);
  2935. StringTuple strlist_value{"a", "b"};
  2936. test_operator.Attr("strlist_value", strlist_value);
  2937. StringTuple get_strlist_value = test_operator.GetStringTupleAttr("strlist_value");
  2938. EXPECT_EQ(strlist_value[0] , get_strlist_value[0]);
  2939. int64_t num = 1;
  2940. IntTuple intlist{num, num};
  2941. test_operator.Attr("intlist", intlist);
  2942. IntTuple get_intlist = test_operator.GetIntTupleAttr("intlist");
  2943. EXPECT_EQ(intlist[0] , get_intlist[0]);
  2944. FloatTuple floatlist{1.1, 1.1};
  2945. test_operator.Attr("floatlist", floatlist);
  2946. FloatTuple get_floatlist = test_operator.GetFloatTupleAttr("floatlist");
  2947. EXPECT_EQ(floatlist[0] , get_floatlist[0]);
  2948. ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  2949. ParserOperator *op = &test_operator;
  2950. Status ret = ConvertToOpDesc(*op, op_desc);
  2951. EXPECT_EQ(domi::SUCCESS , ret);
  2952. TestOperator test_operator_1;
  2953. ParserOperator *op_convert = &test_operator_1;
  2954. ret = ConvertFromOpDesc(op_desc, *op_convert);
  2955. EXPECT_EQ(domi::SUCCESS , ret);
  2956. op_desc = nullptr;
  2957. ret = ConvertFromOpDesc(op_desc, *op_convert);
  2958. EXPECT_EQ(FAILED , ret);
  2959. ret = ConvertToOpDesc(*op, op_desc);
  2960. EXPECT_EQ(FAILED, ret);
  2961. }
  2962. TEST_F(STestTensorflowParser, success_frameworkop_get)
  2963. {
  2964. FrameworkOpOperator *frameworkOp=new FrameworkOpOperator();
  2965. int64_t index = 1;
  2966. std::string opdef_string = "tensorflow_parser";
  2967. frameworkOp->GetFrameworkType();
  2968. frameworkOp->GetNodeDefPkg();
  2969. frameworkOp->FuncDefPkg("func");
  2970. frameworkOp->Index(index);
  2971. frameworkOp->TfOpDef(opdef_string);
  2972. EXPECT_EQ(SUCCESS, SUCCESS);
  2973. delete frameworkOp;
  2974. }
  2975. TEST_F(STestTensorflowParser, op_set_get_success)
  2976. {
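// Smoke test: ConstantOperator setters/getters are only exercised for coverage; no return
// values are checked here.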
  2977. ConstantOperator op;
  2978. vector<int64_t> v;
  2979. op.VectorAttr("key", v);
  2980. op.GetDType();
  2981. }
  2982. TEST_F(STestTensorflowParser, success_argop_get)
  2983. {
  2984. ArgOpOperator *argOp=new ArgOpOperator();
  2985. int64_t index = 1;
  2986. argOp->Index(index);
  2987. argOp->GetIndex();
  2988. EXPECT_EQ(domi::SUCCESS, SUCCESS);
  2989. delete argOp;
  2990. }
  2991. TEST_F(STestTensorflowParser, success_operator)
  2992. {
  2993. ParserOperator tfOperator;
  2994. ParserOperator in_op;
  2995. uint32_t index = 0;
  2996. std::string type = "add";
  2997. std::string key = "Add";
  2998. std::vector<int64_t> value;
  2999. int64_t tmp = 0;
  3000. value.emplace_back(tmp);
  3001. tfOperator.Input(in_op, index);
  3002. tfOperator.Type(type);
  3003. tfOperator.AttrVector(key, value);
  3004. }
  3005. TEST_F(STestTensorflowParser, success_shapen_get)
  3006. {
  3007. ShapeNOperator *shapen =new ShapeNOperator();
  3008. shapen->GetInType();
  3009. shapen->GetInType();
  3010. shapen->GetOutType();
  3011. EXPECT_EQ(domi::SUCCESS, domi::SUCCESS);
  3012. delete shapen;
  3013. }
  3014. TEST_F(STestTensorflowParser, success_VarIsInitializedOpOperator_get)
  3015. {
  3016. VarIsInitializedOpOperator op;
  3017. op.Name("x");
  3018. std::vector<int64_t> value;
  3019. op.VectorAttr("key", value);
  3020. }
  3021. TEST_F(STestTensorflowParser, success_variable_op_get)
  3022. {
  3023. VariableOperator op;
  3024. uint32_t mem_type = 1;
  3025. op.Name("x");
  3026. std::vector<int64_t> value;
  3027. op.Placement("shared_name");
  3028. op.MemType(mem_type);
  3029. }
  3030. TEST_F(STestTensorflowParser, param_success_get)
  3031. {
  3032. FillOperator* fillOp=new FillOperator();
  3033. fillOp->GetDataType();
  3034. fillOp->GetAlpha();
  3035. fillOp->GetBeta();
  3036. EXPECT_EQ(domi::SUCCESS, domi::SUCCESS);
  3037. delete fillOp;
  3038. }
  3039. TEST_F(STestTensorflowParser, tensorflow_Message2Operator_ParseOperatorAttrs_test)
  3040. {
  3041. Message2Operator mess2Op;
  3042. tensorflow::NodeDef *node_def = initNodeDef();
  3043. int depth = 6;
  3044. ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  3045. ge::Operator ops = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc);
  3046. Status ret = mess2Op.ParseOperatorAttrs(node_def, depth, ops);
  3047. EXPECT_EQ(ret, FAILED);
  3048. depth = 4;
  3049. ret = mess2Op.ParseOperatorAttrs(node_def, depth, ops);
  3050. EXPECT_EQ(ret, SUCCESS);
  3051. }
TEST_F(STestTensorflowParser, tensorflow_Pb2Json_RepeatedEnum2Json_test)
{
  Pb2Json toJson;
  ProtobufEnumValueDescriptor *enum_value_desc = new google::protobuf::EnumValueDescriptor();
  bool enum2str = true;
  Json json;
  ProtobufFieldDescriptor *field = nullptr;
  toJson.RepeatedEnum2Json(enum_value_desc, enum2str, json);
  toJson.Enum2Json(enum_value_desc, field, enum2str, json);
  enum2str = false;
  toJson.RepeatedEnum2Json(enum_value_desc, enum2str, json);
  delete enum_value_desc;
}

TEST_F(STestTensorflowParser, tensorflow_Pb2Json_TypeBytes2String_test)
{
  Pb2Json toJson;
  std::string field_name = "offset";
  std::string type_bytes = "offset";
  toJson.TypeBytes2String(field_name, type_bytes);
  field_name = "test";
  toJson.TypeBytes2String(field_name, type_bytes);
}

TEST_F(STestTensorflowParser, tensorflow_Pb2Json_RepeatedMessage2Json_test)
{
  Pb2Json toJson;
  tensorflow::NodeDef *node_def = initNodeDef();
  ProtobufFieldDescriptor *field = new google::protobuf::FieldDescriptor();
  ProtobufReflection *reflection = nullptr;
  set<string> black_fields;
  black_fields.emplace("offset");
  Json json;
  bool enum2str = true;
  toJson.RepeatedMessage2Json((*node_def), field, reflection, black_fields, json, enum2str);
  delete field;
}

TEST_F(STestTensorflowParser, tensorflow_Pb2Json_OneField2Json_test)
{
  Pb2Json toJson;
  tensorflow::NodeDef *node_def = initNodeDef();
  ProtobufFieldDescriptor *field = new google::protobuf::FieldDescriptor();
  ProtobufReflection *reflection = nullptr;
  set<string> black_fields;
  black_fields.emplace("offset");
  Json json;
  bool enum2str = true;
  Message2Operator mess2Op;
  int depth = 4;
  ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>("FusionCustom", "FusionCustom");
  ge::Operator ops = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc);
  field->CppTypeName(google::protobuf::FieldDescriptor::CPPTYPE_ENUM);
  mess2Op.ParseField(reflection, node_def, field, depth, ops);
  toJson.OneField2Json((*node_def), field, reflection, black_fields, json, enum2str, 1);
  toJson.OneField2Json((*node_def), field, reflection, black_fields, json, enum2str, 5);
  delete field;
}

TEST_F(STestTensorflowParser, input_proto_real_path_success) {
  const char *caffe_proto_path = "./caffe/caffe.proto";
  const char *custom_proto_path = "./caffe/custom.proto";
  ProtoFileParser proto_file_parser;
  string fusion_proto_file;
  auto ret = proto_file_parser.CombineProtoFile(caffe_proto_path, custom_proto_path, fusion_proto_file);
  EXPECT_EQ(ret, FAILED);
  ret = proto_file_parser.RecordProtoMessage(caffe_proto_path);
  EXPECT_EQ(ret, FAILED);
  ret = proto_file_parser.WriteProtoFile(caffe_proto_path, custom_proto_path);
  EXPECT_EQ(ret, FAILED);
  std::cout << __FILE__ << std::endl;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/caffe.proto";
  caffe_proto_path = proto_file.c_str();
  ret = proto_file_parser.CombineProtoFile(caffe_proto_path, caffe_proto_path, fusion_proto_file);
  EXPECT_EQ(ret, SUCCESS);
  ret = proto_file_parser.WriteProtoFile(caffe_proto_path, custom_proto_path);
  EXPECT_EQ(ret, FAILED);
  std::string dest_line = "test";
  ret = proto_file_parser.FindConflictLine(custom_proto_path, 0, dest_line);
  EXPECT_EQ(ret, FAILED);
  std::map<int, std::pair<string, string>> identifier_op_map;
  std::map<std::string, std::pair<int, string>> op_identifier_map;
  ret = proto_file_parser.ParseProtoFile(custom_proto_path, identifier_op_map, op_identifier_map);
  EXPECT_EQ(ret, FAILED);
  proto_file_parser.GetFusionProtoFile();
  std::ofstream write_tmp;
  ret = proto_file_parser.AddCustomAndConflictMessage(custom_proto_path, write_tmp);
  EXPECT_EQ(ret, FAILED);
}

TEST_F(STestTensorflowParser, all_success)
{
  PreChecker::OpId id1 = (void *)(intptr_t)1;
  PreChecker::OpId id2 = (void *)(intptr_t)2;
  PreChecker::OpId id3 = (void *)(intptr_t)3;
  PreChecker::OpId id4 = (void *)(intptr_t)4;
  PreChecker &checker = PreChecker::Instance();
  EXPECT_EQ(checker.AddOp(id1, "name1", "type1"), SUCCESS);
  EXPECT_EQ(checker.AddOp(id2, "name2", "type2"), SUCCESS);
  EXPECT_EQ(checker.AddOp(id3, "name1", "type3"), SUCCESS);
  EXPECT_EQ(checker.AddOp(id4, "name4", ge::parser::DETECTIONOUTPUT), SUCCESS);
  EXPECT_EQ(checker.CheckName(id1), SUCCESS);
  EXPECT_EQ(checker.CheckName(id2), SUCCESS);
  EXPECT_EQ(checker.CheckName(id3), SUCCESS);
  EXPECT_EQ(checker.CheckName(id4), SUCCESS);
  EXPECT_EQ(checker.CheckType(id1), SUCCESS);
  EXPECT_EQ(checker.CheckType(id2), SUCCESS);
  EXPECT_EQ(checker.CheckType(id3), SUCCESS);
  EXPECT_EQ(checker.CheckType(id4), SUCCESS);
  EXPECT_EQ(checker.AddCause(id1, PreChecker::ErrorCode::OK, "msg"), SUCCESS);
  EXPECT_EQ(checker.AddCause(id1, PreChecker::ErrorCode::PARAM_INVALID, "msg"), domi::SUCCESS);
  PreChecker::Cause cause;
  cause.code = PreChecker::ErrorCode::TYPE_AMBIGUOUS;
  cause.message = "msg";
  EXPECT_EQ(checker.AddCause(id1, cause), SUCCESS);
  EXPECT_EQ(checker.HasError(), true);
  EXPECT_EQ(checker.Save("check_result.json"), SUCCESS);
  std::string msg = "msg";
  Status ret = checker.Clear(id1, msg);
  EXPECT_EQ(ret, SUCCESS);
  checker.Clear();
  checker.RefreshErrorMessageByName("name1", PreChecker::ErrorCode::PARAM_INVALID, "node repeated in");
}
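// Note: PreChecker is a process-wide singleton, so the ops and causes recorded above
// persist until Clear() is called; Save() is expected to write the accumulated
// check results to "check_result.json".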
TEST_F(STestTensorflowParser, tensorflow_tbe_tfplugin_loader_test)
{
  TBEPluginLoader pluginLoad;
  vector<string> fileList = {};
  string caffeParserPath = "";
  string full_name = "dabc";
  string caffe_parser_so_suff = "abc";
  pluginLoad.ProcessSoFullName(fileList, caffeParserPath, full_name, caffe_parser_so_suff);
  ASSERT_EQ(caffeParserPath, full_name);
  pluginLoad.ClearHandles_();
  std::cout << __FILE__ << std::endl;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/";
  std::string path = proto_file;
  std::string caffe_parser_path = path;
  pluginLoad.FindParserSo(path, fileList, caffe_parser_path);
  setenv("ASCEND_OPP_PATH", "aaa", 1);
  std::string customop_path = "";
  pluginLoad.GetCustomOpPath(customop_path);
  ASSERT_EQ(customop_path, "aaa/framework/custom/:aaa/framework/built-in/tensorflow");
  Status ret = pluginLoad.Finalize();
  EXPECT_EQ(ret, SUCCESS);
}
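// Note: GetCustomOpPath appears to derive the plugin search path from the
// ASCEND_OPP_PATH environment variable set above, hence the expected
// "<opp>/framework/custom/:<opp>/framework/built-in/tensorflow" layout.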
TEST_F(STestTensorflowParser, tensorflow_data_op_parser_test)
{
  std::vector<int64_t> shape = {1, 1, 224, 224};
  ge::GeTensorDesc tensor_desc;
  DataOpParser opParser;
  Status ret = opParser.Init5DInputTensor(shape, tensor_desc);
  EXPECT_EQ(ret, SUCCESS);
  ret = opParser.Init5DOutputTensor(shape, tensor_desc);
  EXPECT_EQ(ret, SUCCESS);
  ge::OpDescPtr op = std::make_shared<ge::OpDesc>();
  ret = opParser.ParseShape(shape, op);
}

TEST_F(STestTensorflowParser, read_proto_from_mem_test)
{
  tensorflow::NodeDef *node_def = initNodeDef();
  const char *data = nullptr;
  int size = 3;
  bool ret = parser::ReadProtoFromMem(data, size, node_def);
  EXPECT_EQ(false, ret);
  data = "not file";
  ret = parser::ReadProtoFromMem(data, size, node_def);
  EXPECT_EQ(false, ret);
}

TEST_F(STestTensorflowParser, tensorflow_GetOriginalType_test)
{
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  ge::OpDescPtr op = std::make_shared<ge::OpDesc>("fusionCustom", parser::FRAMEWORKOP);
  ge::NodePtr node = std::make_shared<ge::Node>(op, graph);
  string type = parser::FRAMEWORKOP;
  Status ret = parser::GetOriginalType(node, type);
  EXPECT_EQ(ret, INTERNAL_ERROR);
}

TEST_F(STestTensorflowParser, tensorflow_ReadBytesFromBinaryFile_test)
{
  const char *file_name = nullptr;
  char *buffer = nullptr;
  int length = 1;
  bool ret = parser::ReadBytesFromBinaryFile(file_name, &buffer, length);
  EXPECT_EQ(ret, false);
  file_name = "./caffe.proto";
  ret = parser::ReadBytesFromBinaryFile(file_name, &buffer, length);
  EXPECT_EQ(ret, false);
  std::cout << __FILE__ << std::endl;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/caffe.proto";
  file_name = proto_file.c_str();
  ret = parser::ReadBytesFromBinaryFile(file_name, &buffer, length);
  EXPECT_EQ(ret, true);
  char path[4096 + 1] = {0};
  memset(path, 'a', 4096);
  std::string realPath = parser::RealPath(path);
  EXPECT_EQ(realPath, "");
  const char *real_path = nullptr;
  realPath = parser::RealPath(real_path);
  EXPECT_EQ(realPath, "");
}
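// Note: the 4096-character path above exceeds the typical PATH_MAX limit, so
// RealPath is expected to return an empty string for it, just as it does for a
// null input.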
TEST_F(STestTensorflowParser, tensorflow_AclGrphParseUtil_ParseAclInputFp16Nodes_test)
{
  AclGrphParseUtil parserUtil;
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  std::string input_fp16_nodes = "Add";
  std::string is_input_adjust_hw_layout = "is_input_adjust_hw_layout";
  Status ret = parserUtil.ParseAclInputFp16Nodes(graph, input_fp16_nodes, is_input_adjust_hw_layout);
  EXPECT_EQ(ret, PARAM_INVALID);
  is_input_adjust_hw_layout = "true";
  ret = parserUtil.ParseAclInputFp16Nodes(graph, input_fp16_nodes, is_input_adjust_hw_layout);
  EXPECT_EQ(ret, PARAM_INVALID);
  vector<string> adjust_fp16_format_vec = {"true", "false"};
  uint32_t index = 1;
  ge::OpDescPtr op_desc = std::make_shared<ge::OpDesc>();
  parserUtil.AddAttrsForInputNodes(adjust_fp16_format_vec, input_fp16_nodes, index, op_desc);
  std::string is_output_fp16 = "is_output_fp16";
  ret = parserUtil.ParseAclOutputFp16NodesFormat(is_output_fp16);
  EXPECT_EQ(ret, PARAM_INVALID);
  is_output_fp16 = "false";
  ret = parserUtil.ParseAclOutputFp16NodesFormat(is_output_fp16);
  EXPECT_EQ(ret, SUCCESS);
  is_output_fp16 = "true";
  ret = parserUtil.ParseAclOutputFp16NodesFormat(is_output_fp16);
  EXPECT_EQ(ret, SUCCESS);
}

TEST_F(STestTensorflowParser, tensorflow_ModelSaver_test)
{
  const char *file_path = nullptr;
  const Json model = {{"a", "b"}};
  Status ret = ge::parser::ModelSaver::SaveJsonToFile(file_path, model);
  EXPECT_EQ(ret, FAILED);
  file_path = "./origin_models/";
  ret = ge::parser::ModelSaver::SaveJsonToFile(file_path, model);
  EXPECT_EQ(ret, FAILED);
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/caffe.proto";
  file_path = proto_file.c_str();
  ret = ge::parser::ModelSaver::SaveJsonToFile(file_path, model);
  char path[4096 + 1] = {0};
  memset(path, 'a', 4096);
  EXPECT_EQ(-1, ge::parser::ModelSaver::CreateDirectory(path));
  EXPECT_EQ(-1, ge::parser::ModelSaver::CheckPath(path));
}

TEST_F(STestTensorflowParser, create_weights_parser_failed)
{
  WeightsParserFactory *factory = WeightsParserFactory::Instance();
  shared_ptr<WeightsParser> weight_parser = factory->CreateWeightsParser(FRAMEWORK_RESERVED);
  ASSERT_TRUE(NULL == weight_parser);
  ModelParserFactory *modelFactory = ModelParserFactory::Instance();
  shared_ptr<ModelParser> model_parser = modelFactory->CreateModelParser(FRAMEWORK_RESERVED);
  ASSERT_TRUE(NULL == model_parser);
  std::shared_ptr<OpParserFactory> parserFactory = OpParserFactory::Instance(domi::FrameworkType::CAFFE);
  std::shared_ptr<OpParser> fusion_op_parser = parserFactory->CreateFusionOpParser(ge::parser::DATA);
  ASSERT_TRUE(NULL == fusion_op_parser);
  std::shared_ptr<OpParser> op_parser = parserFactory->CreateOpParser("10");
  ASSERT_TRUE(NULL == op_parser);
}

TEST_F(STestTensorflowParser, custom_parser_adapter_register)
{
  using PARSER_CREATOR_FN = std::function<std::shared_ptr<OpParser>(void)>;
  PARSER_CREATOR_FN func = CustomParserAdapterRegistry::Instance()->GetCreateFunc(domi::TENSORFLOW);
  CustomParserAdapterRegistry::Instance()->Register(domi::TENSORFLOW, func);
  CustomParserAdapterRegistry::Instance()->Register(domi::TENSORFLOW, func);
  func = CustomParserAdapterRegistry::Instance()->GetCreateFunc(domi::FRAMEWORK_RESERVED);
  ASSERT_EQ(nullptr, func);
}

TEST_F(STestTensorflowParser, tensorflow_parser_api_test)
{
  std::map<std::string, std::string> options = {{"ge.runFlag", "1"}};
  Status ret = ParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserFinalize();
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserFinalize();
  EXPECT_EQ(ret, SUCCESS);
}

TEST_F(STestTensorflowParser, tensorflow_FP16_parser_test)
{
  parser::fp16_t fp16;
  fp16.ToDouble();
  fp16.ToInt8();
  fp16.ToUInt8();
  fp16.ToInt16();
  fp16.ToUInt16();
  fp16.ToInt32();
  fp16.ToUInt32();
  fp16.IsInf();
  fp16.operator+(fp16);
  fp16.operator-(fp16);
  fp16.operator*(fp16);
  fp16.operator/(fp16);
  fp16.operator+=(fp16);
  fp16.operator-=(fp16);
  fp16.operator*=(fp16);
  fp16.operator/=(fp16);
  fp16.operator==(fp16);
  fp16.operator!=(fp16);
  fp16.operator>(fp16);
  fp16.operator>=(fp16);
  fp16.operator<(fp16);
  fp16.operator<=(fp16);
  fp16.operator=(fp16);
  float f_val = 0.1;
  fp16.operator=(f_val);
  double d_val = 0.2;
  fp16.operator=(d_val);
  int8_t i_val = 1;
  fp16.operator=(i_val);
  uint8_t ui_val = 2;
  fp16.operator=(ui_val);
  int16_t i_vals = 1;
  fp16.operator=(i_vals);
  uint16_t ui16_val = 1;
  fp16.operator=(ui16_val);
  ui16_val = 0;
  fp16.operator=(ui16_val);
  ui16_val = 1;
  fp16.operator=(ui16_val);
  int32_t i32_val = 0;
  fp16.operator=(i32_val);
  i32_val = 1;
  fp16.operator=(i32_val);
  uint32_t ui32_val = 0;
  fp16.operator=(ui32_val);
  ui32_val = 1;
  fp16.operator=(ui32_val);
  float f_val1 = 2139095000.2;
  ge::parser::fp16_t fp16_1, fp16_2;
  fp16_1.operator=(fp16_2);
  fp16_1.operator=(f_val1);
  float f_val2 = 0.0000112;
  fp16_1.operator=(f_val2);
  float f_val3 = 0.0000000299;
  fp16_1.operator=(f_val3);
  float f_val4 = 0.00000000299;
  fp16_1.operator=(f_val4);
  uint32_t u_val1 = 4095;
  fp16_1.operator=(u_val1);
  uint16_t u16_val1 = 4095;
  fp16_1.operator=(u16_val1);
  int16_t int_val1 = 0;
  fp16_1.operator=(int_val1);
  int16_t int_val2 = -32767;
  fp16_1.operator=(int_val2);
  i_val = -0x7FFFFFFF;
  fp16_1.operator=(i_val);
  fp16.operator=(f_val1);
  float f = fp16;  // operator float()
  double d = fp16;
  int8_t int8 = fp16;
  uint8_t uint8 = fp16;
  uint16_t uint16 = fp16;
  int32_t int32 = fp16;
  uint32_t uint32 = fp16;
  int64_t int64 = fp16;
  uint64_t uint64 = fp16;
  (void)f;
  (void)d;
  (void)int8;
  (void)uint8;
  (void)uint8;
  (void)uint16;
  (void)int32;
  (void)uint32;
  (void)int64;
  (void)uint64;
  parser::fp16_t val;
  val.val = 0x7C00;
  val.IsInf();
  val.val = 0xFC00;
  val.IsInf();
  parser::fp16_t fp16_3, fp16_4;
  fp16_3.val = 1;
  fp16_4.val = 2;
  fp16_4.operator/(fp16_3);
  fp16.val = 21504;
  int16_t int16 = fp16;
  int8 = fp16;
}
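// Note: 0x7C00 and 0xFC00 are the IEEE-754 binary16 bit patterns for +inf and
// -inf, which is why IsInf() is exercised with exactly those raw values above.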
TEST_F(STestTensorflowParser, tensorflow_AclParserInitialize_test)
{
  AclGrphParseUtil parseUtil;
  std::map<std::string, std::string> options;
  Status ret = parseUtil.AclParserInitialize(options);
  EXPECT_EQ(ret, FAILED);
  options = {{ge::FRAMEWORK_TYPE, "2"}};
  ret = parseUtil.AclParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
}

TEST_F(STestTensorflowParser, tensorflow_GetOutputLeaf_test)
{
  AclGrphParseUtil parseUtil;
  ge::ComputeGraphPtr compute_graph = build_graph(true);
  ge::NodePtr output_nodes_info = compute_graph->FindNode("Relu3");
  std::vector<std::pair<ge::NodePtr, int32_t>> output_nodes = {{output_nodes_info, 0}};
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>("default");
  ge::NodePtr node = AddNode(compute_graph, "K", parser::NETOUTPUT, 1, 1);
  Status ret = parseUtil.GetOutputLeaf(node, output_nodes);
  EXPECT_EQ(ret, FAILED);
}

TEST_F(STestTensorflowParser, graph_pass_error)
{
  ComputeGraphPtr graph = std::make_shared<ComputeGraph>("test");
  ErrorGraphPass pass;
  ge::parser::PassManager passManager;
  std::vector<std::pair<string, GraphPass *>> passes;
  passes.emplace_back("", &pass);
  Status status = passManager.Run(graph, passes);
  EXPECT_EQ(domi::FAILED, status);
}

TEST_F(STestTensorflowParser, parser_FindFmkNodeCluser_success)
{
  ComputeGraphPtr graph = std::make_shared<ComputeGraph>("FrameworkOp");
  ParserGraphOptimizer graphOptimizer(graph, domi::TENSORFLOW);
  ge::NodePtr node = AddNode(graph, "K", parser::FRAMEWORK_OP_TYPE, 1, 1);
  ge::NodePtr output_nodes_info = graph->FindNode("Relu3");
  std::unordered_map<string, vector<NodePtr>> node_cluser_Map({
      {"x", {node, output_nodes_info}},
  });
  Status ret = graphOptimizer.FindFmkNodeCluser(node_cluser_Map);
  EXPECT_EQ(ret, SUCCESS);
}

TEST_F(STestTensorflowParser, parser_RebuildOutputAnchors_test)
{
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  string inputNodeType = "DATA";
  MakeDagGraph(subGraph, inputNodeType);
  vector<ge::InDataAnchorPtr> in_anchor;
  vector<ge::OutDataAnchorPtr> out_anchor;
  for (ge::NodePtr node : subGraph->GetAllNodes()) {
    for (auto out : node->GetAllOutDataAnchors()) {
      for (auto in : node->GetAllInDataAnchors()) {
        if (in->GetPeerOutAnchor() != nullptr &&
            in->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc()->GetType() == parser::DATA) {
          in_anchor.push_back(in);
        }
      }
      for (auto i : out->GetPeerInDataAnchors()) {
        if (i->GetOwnerNode()->GetOpDesc()->GetType() == parser::NETOUTPUT) {
          out_anchor.push_back(out);
        }
      }
    }
  }
  OpDescPtr fusion_op_desc = make_shared<ge::OpDesc>("FusionCustom", ge::parser::CONSTANT);
  Status ret = graphOptimizer.RebuildOutputAnchors(out_anchor, fusion_op_desc);
  EXPECT_EQ(domi::SUCCESS, ret);
  ret = graphOptimizer.RebuildInputAnchors(in_anchor, fusion_op_desc);
  EXPECT_EQ(domi::SUCCESS, ret);
}

TEST_F(STestTensorflowParser, parser_LinkInnerAnchor_test)
{
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  NodePtr node_a = AddNode(subGraph, "A", parser::NETOUTPUT, 1, 1);
  NodePtr node_b = AddNode(subGraph, "B", parser::NETOUTPUT, 1, 1);
  unordered_map<string, ge::NodePtr> node_map;
  node_map.insert(pair<string, ge::NodePtr>("A", node_a));
  node_map.insert(pair<string, ge::NodePtr>("B", node_b));
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  graphOptimizer.LinkInnerAnchor(node_map);
}

TEST_F(STestTensorflowParser, parser_MarkForFusion_test)
{
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  ge::NodePtr node = AddNode(subGraph, "K", parser::FRAMEWORK_OP_TYPE, 1, 1);
  ge::NodePtr output_nodes_info = subGraph->FindNode("Relu3");
  std::unordered_map<string, vector<NodePtr>> node_cluser_Map({
      {"x", {node, output_nodes_info}},
  });
  Status ret = graphOptimizer.MarkForFusion(node_cluser_Map);
  EXPECT_EQ(ret, INTERNAL_ERROR);
}

TEST_F(STestTensorflowParser, parser_UpdateGraph_test)
{
  std::vector<NodePtr> nodes;
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  NodePtr node_a = AddNode(subGraph, "A", parser::NETOUTPUT, 1, 1);
  NodePtr node_b = AddNode(subGraph, "B", parser::NETOUTPUT, 1, 1);
  nodes.emplace_back(node_a);
  nodes.emplace_back(node_b);
  Status ret = graphOptimizer.UpdateGraph(nodes);
  EXPECT_EQ(ret, PARAM_INVALID);
}

TEST_F(STestTensorflowParser, parser_RebuildFusionNode_test)
{
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);
  ParserGraphOptimizer graphOptimizer(graph, domi::TENSORFLOW);
  string inputNodeType = "DATA";
  MakeDagGraph(graph, inputNodeType);
  vector<ge::InDataAnchorPtr> input_anchors;
  vector<ge::OutDataAnchorPtr> output_anchors;
  for (ge::NodePtr node : graph->GetAllNodes()) {
    for (auto out : node->GetAllOutDataAnchors()) {
      for (auto in : node->GetAllInDataAnchors()) {
        if (in->GetPeerOutAnchor() != nullptr &&
            in->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc()->GetType() == parser::DATA) {
          input_anchors.push_back(in);
        }
      }
      for (auto i : out->GetPeerInDataAnchors()) {
        if (i->GetOwnerNode()->GetOpDesc()->GetType() == parser::NETOUTPUT) {
          output_anchors.push_back(out);
        }
      }
    }
  }
  map<ge::OutDataAnchorPtr, vector<ge::InDataAnchorPtr>> output_in_map;
  vector<ge::InControlAnchorPtr> input_control_anchors;
  vector<ge::OutControlAnchorPtr> output_control_anchors;
  ge::OpDescPtr op = std::make_shared<ge::OpDesc>("dpop_123", "FrameworkOp");
  ge::NodePtr fusion_node = std::make_shared<ge::Node>(op, graph);
  Status ret = graphOptimizer.RebuildFusionNode(input_anchors, output_anchors, output_in_map,
                                                input_control_anchors, output_control_anchors, fusion_node);
  EXPECT_EQ(ret, FAILED);
}

TEST_F(STestTensorflowParser, parser_InsertNode_test)
{
  std::vector<NodePtr> nodes;
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  auto merge_node = AddNode(subGraph, "Merge", parser::MERGE, 1, 2);
  auto node1 = AddNode(subGraph, "Op1", parser::RELU, 1, 1);
  auto node2 = AddNode(subGraph, "Op2", parser::CONVOLUTION, 1, 1);
  auto node3 = AddNode(subGraph, "Op3", parser::CONVOLUTION, 1, 1);
  nodes.emplace_back(merge_node);
  nodes.emplace_back(node1);
  nodes.emplace_back(node2);
  nodes.emplace_back(node3);
  vector<ge::InDataAnchorPtr> in_anchor;
  vector<ge::OutDataAnchorPtr> out_anchor;
  map<ge::OutDataAnchorPtr, vector<ge::InDataAnchorPtr>> output_in_map;
  vector<ge::InControlAnchorPtr> input_control_anchors;
  vector<ge::OutControlAnchorPtr> output_control_anchors;
  unordered_map<string, ge::NodePtr> node_map;
  node_map.insert(pair<string, ge::NodePtr>("A", merge_node));
  node_map.insert(pair<string, ge::NodePtr>("B", node1));
  node_map.insert(pair<string, ge::NodePtr>("C", node2));
  node_map.insert(pair<string, ge::NodePtr>("D", node3));
  Status ret = graphOptimizer.InsertNode(subGraph, nodes, in_anchor, out_anchor, output_in_map,
                                         input_control_anchors, output_control_anchors, node_map);
  EXPECT_EQ(ret, PARAM_INVALID);
}

TEST_F(STestTensorflowParser, parser_GeStoi_test)
{
  TensorFlowModelParser model_parser;
  string input_node_name = "dynamic_rnn_node1";
  string index_str = "dynamic_rnn";
  int32_t index = 0;
  Status ret = model_parser.GeStoi(input_node_name, index_str, &index);
  EXPECT_EQ(ret, INTERNAL_ERROR);
}

TEST_F(STestTensorflowParser, parser_ConstOpNeedUpdate_test)
{
  ge::TensorFlowModelParser tensorflow_parser;
  NodeDef *op_node_def = new NodeDef();
  op_node_def->set_name("OP");
  op_node_def->add_input("OP/Input_1");
  op_node_def->set_op(TENSORFLOWF_NODE_OP_CONST);
  NodeDef *input_node = new NodeDef();
  input_node->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  input_node->add_input("OP/Input_1/Input_2");
  NodeDef *input_2 = new NodeDef();
  input_2->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  tensorflow_parser.nodedef_map_["OP"] = op_node_def;
  tensorflow_parser.nodedef_map_["OP/Input_1"] = input_node;
  tensorflow_parser.nodedef_map_["OP/Input_1/Input_2"] = input_2;
  std::string op_name = "OP/Input_1/Input_2";
  Status ret = tensorflow_parser.ConstOpNeedUpdate(op_name);
  EXPECT_EQ(ret, true);
  op_name = "OP";
  ret = tensorflow_parser.ConstOpNeedUpdate(op_name);
  EXPECT_EQ(ret, true);
  delete op_node_def;
  delete input_node;
  delete input_2;
}

TEST_F(STestTensorflowParser, parser_UppdateInputMap_test)
{
  ge::TensorFlowModelParser tensorflow_parser;
  ScopeFusionOpInfo info;
  ge::OpNodeContext normal_op_node_context;
  ge::OpNodeContext fusion_op_node_context;
  string fusion_op_name = "dropout";
  normal_op_node_context.input_map["dropout"].push_back({0, 0});
  normal_op_node_context.input_map["conv_conv5/BatchNorm/moving_variance"].push_back({0, 1});
  normal_op_node_context.output_map["dropout"].push_back({1, 0});
  normal_op_node_context.output_map["conv_conv5/BatchNorm/batchnorm/add/y"].push_back({-1, -1});
  tensorflow::GraphDef *graph = new tensorflow::GraphDef();
  ScopePassManager passmanager;
  shared_ptr<ScopeGraph> scope_graph = passmanager.BuildScopeGraph(graph);
  NodeDef *node1 = graph->add_node();
  node1->set_name("dropout");
  node1->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  node1->add_input("conv_conv5/BatchNorm/moving_variance");
  node1->add_input("conv_conv5/BatchNorm/batchnorm/add/y");
  NodeDef *node2 = graph->add_node();
  node2->set_name("conv_conv5/BatchNorm/moving_variance");
  node2->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  NodeDef *node3 = graph->add_node();
  node3->set_name("conv_conv5/BatchNorm/batchnorm/add/y");
  node3->set_op(TENSORFLOWF_NODE_OP_IDENTITY);
  info.fusion_node_name = "conv_conv5/BatchNorm/batchnorm";
  info.fusion_op_type = parser::FUSIONBATCHNORM;
  info.node_name = "conv_conv5/BatchNorm/batchnorm/add";
  info.description = "";
  info.scope_pass = true;
  tensorflow_parser.nodedef_map_["dropout"] = node1;
  tensorflow_parser.nodedef_map_["conv_conv5/BatchNorm/moving_variance"] = node2;
  tensorflow_parser.nodedef_map_["conv_conv5/BatchNorm/batchnorm/add/y"] = node3;
  Status ret = tensorflow_parser.UppdateInputMap(scope_graph, info, fusion_op_node_context, normal_op_node_context);
  EXPECT_EQ(ret, domi::SUCCESS);
  ret = tensorflow_parser.UppdateOutputMap(scope_graph, info, fusion_op_node_context, normal_op_node_context);
  TensorFlowWeightsParser weights_parser;
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string proto_file = caseDir + "/origin_models/tf_add.pb";
  const char *file = proto_file.c_str();
  ge::Graph graphs;
  Status weightsRet = weights_parser.Parse(file, graphs);
  EXPECT_EQ(weightsRet, SUCCESS);
  delete graph;
}

TEST_F(STestTensorflowParser, tensorflow_optimizer_fmk_fusion_op) {
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  const std::string root_proto = caseDir + "/origin_models/test_getnext_dynamic_fusion.pbtxt";
  domi::tensorflow::GraphDef graphDef;
  bool protoRet = parser::ReadProtoFromText(root_proto.c_str(), &graphDef);
  ASSERT_EQ(protoRet, true);
  TensorFlowModelParser tensorflow_parser;
  ge::ComputeGraphPtr root_graph = ge::parser::MakeShared<ge::ComputeGraph>("tmp_graph");
  Status ret = tensorflow_parser.ParseProto(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  EXPECT_EQ(ret, SUCCESS);
  EXPECT_EQ(root_graph->GetDirectNode().size(), 3);
}

TEST_F(STestTensorflowParser, AddDumpOriginName_test)
{
  GeTensorDesc scalar_tensor(GeShape(), ge::FORMAT_NCHW, ge::DT_FLOAT);
  ge::ComputeGraphPtr parent_graph = std::make_shared<ge::ComputeGraph>("parent_graph");
  ge::OpDescPtr parent = std::make_shared<ge::OpDesc>();
  parent->SetType("Foo");
  parent->SetName("foo");
  ge::NodePtr foo = parent_graph->AddNode(parent);
  ge::ComputeGraphPtr sub_graph = std::make_shared<ge::ComputeGraph>("sub_graph");
  auto child = std::make_shared<ge::OpDesc>();
  child->SetType("Bar");
  child->SetName("bar");
  ge::NodePtr bar = sub_graph->AddNode(child);
  AddDumpOriginName(foo, "f", sub_graph);
  std::vector<std::string> original_names;
  (void)ge::AttrUtils::GetListStr(bar->GetOpDesc(), ge::ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, original_names);
  EXPECT_EQ(original_names.size(), 1U);
  EXPECT_EQ(original_names[0], "foo/f/bar");
  (void)ge::AttrUtils::SetListStr(foo->GetOpDesc(), ge::ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, original_names);
  AddDumpOriginName(foo, "f", sub_graph);
  original_names.clear();
  (void)ge::AttrUtils::GetListStr(bar->GetOpDesc(), ge::ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, original_names);
  EXPECT_EQ(original_names.size(), 1U);
  EXPECT_EQ(original_names[0], "foo/f/bar/f/bar");
  original_names.push_back("abc");
  (void)ge::AttrUtils::SetListStr(foo->GetOpDesc(), ge::ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, original_names);
  AddDumpOriginName(foo, "f", sub_graph);
  original_names.clear();
  (void)ge::AttrUtils::GetListStr(bar->GetOpDesc(), ge::ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, original_names);
  EXPECT_EQ(original_names.size(), 2U);
  EXPECT_EQ(original_names[0], "foo/f/bar/f/bar/f/bar");
  EXPECT_EQ(original_names[1], "abc");
}
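// Note: AddDumpOriginName appears to prefix each node in the subgraph with
// "<parent name>/<prefix>/" and to reuse any dump-origin names already recorded on the
// parent, which is why repeated calls compound the path ("foo/f/bar" -> "foo/f/bar/f/bar").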
}  // namespace ge