graph_preprocess.cc

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "graph/preprocess/graph_preprocess.h"
#include <map>
#include <set>
#include <string>
#include "common/formats/format_transfers/format_transfer_fractal_nz.h"
#include "common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h"
#include "common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.h"
#include "common/formats/format_transfers/format_transfer_transpose.h"
#include "common/formats/utils/formats_trans_utils.h"
#include "framework/common/helper/model_helper.h"
#include "common/math/math_util.h"
#include "framework/common/op/ge_op_utils.h"
#include "ir_build/option_utils.h"
#include "common/ge_call_wrapper.h"
#include "common/local_context.h"
#include "common/transop_util.h"
#include "graph/ge_context.h"
#include "graph/shape_refiner.h"
#include "graph/manager/graph_var_manager.h"
#include "graph/manager/util/rt_context_util.h"
#include "graph/optimize/graph_optimize.h"
#include "graph/passes/addn_pass.h"
#include "graph/passes/aicpu_constant_folding_pass.h"
#include "graph/passes/assert_pass.h"
#include "graph/passes/common_subexpression_elimination_pass.h"
#include "graph/passes/cond_pass.h"
#include "graph/passes/cond_remove_pass.h"
#include "graph/passes/constant_folding_pass.h"
#include "graph/passes/dimension_adjust_pass.h"
#include "graph/passes/dimension_compute_pass.h"
#include "graph/passes/dropout_pass.h"
#include "graph/passes/enter_pass.h"
#include "graph/passes/for_pass.h"
#include "graph/passes/guarantee_const_pass.h"
#include "graph/passes/hccl_memcpy_pass.h"
#include "graph/passes/hccl_group_pass.h"
#include "graph/passes/identity_pass.h"
#include "graph/passes/infershape_pass.h"
#include "graph/passes/infer_value_range_pass.h"
#include "graph/passes/merge_pass.h"
#include "graph/passes/net_output_pass.h"
#include "graph/passes/no_use_reshape_remove_pass.h"
#include "graph/passes/parallel_concat_start_op_pass.h"
#include "graph/passes/placeholder_with_default_pass.h"
#include "graph/passes/prevent_gradient_pass.h"
#include "graph/passes/print_op_pass.h"
#include "graph/passes/prune_pass.h"
#include "graph/passes/replace_transshape_pass.h"
#include "graph/passes/replace_with_empty_const_pass.h"
#include "graph/passes/resource_pair_add_control_pass.h"
#include "graph/passes/resource_pair_remove_control_pass.h"
#include "graph/passes/save_pass.h"
#include "graph/passes/shape_operate_op_remove_pass.h"
#include "graph/passes/snapshot_pass.h"
#include "graph/passes/stop_gradient_pass.h"
#include "graph/passes/switch_dead_branch_elimination.h"
#include "graph/passes/unused_const_pass.h"
#include "graph/passes/var_is_initialized_op_pass.h"
#include "graph/passes/variable_prepare_op_pass.h"
#include "graph/passes/mark_force_unknown_for_cond_pass.h"
#include "graph/preprocess/insert_op/util_insert_aipp_op.h"
#include "graph/utils/type_utils.h"
#include "inc/pass_manager.h"
#include "init/gelib.h"
#include "graph/preprocess/multi_batch_copy_graph.h"
#include "graph/passes/data_pass.h"
#include "graph/passes/mark_agnostic_pass.h"
namespace ge {
namespace {
static std::map<std::string, ge::DataType> output_type_str_to_datatype = {
    {"FP32", ge::DT_FLOAT},    {"FP16", ge::DT_FLOAT16},  {"INT8", ge::DT_INT8},   {"INT16", ge::DT_INT16},
    {"UINT16", ge::DT_UINT16}, {"UINT8", ge::DT_UINT8},   {"INT32", ge::DT_INT32}, {"INT64", ge::DT_INT64},
    {"UINT32", ge::DT_UINT32}, {"UINT64", ge::DT_UINT64}, {"DOUBLE", ge::DT_DOUBLE}};
const char *const kMbatchSwitchnName = "mbatch-switch-name";
// the size of user defined output datatype or format string after split by ":".
const size_t kUserDefinedElementCount = 2;
const int kDataOutIndex = 0;
const int64_t kInvalidDynaimcDimsType = -1;
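// Builds a Const op holding the dims of data_tensor as an int32 tensor (a scalar shape becomes the single value 1);
// CreateTransNode below wires it in as the shape input of Reshape trans nodes.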
OpDescPtr CreateTensorShape(const GeTensorDesc &data_tensor) {
  GeTensorPtr tensor = MakeShared<GeTensor>();
  if (tensor == nullptr) {
    REPORT_CALL_ERROR("E19999", "New GeTensor failed");
    GELOGE(INTERNAL_ERROR, "[New][GeTensor] failed");
    return nullptr;
  }
  tensor->MutableTensorDesc().SetDataType(DT_INT32);
  tensor->MutableTensorDesc().SetFormat(FORMAT_ND);
  auto dst_ge_shape = data_tensor.GetShape();
  auto dim_cnt = static_cast<int64_t>(dst_ge_shape.GetDimNum());
  if (dim_cnt == 0) {  // if the dim_cnt is 0, the tensor is a scalar
    tensor->MutableTensorDesc().SetShape(GeShape());
    int32_t dst_shape = 1;
    if (tensor->SetData(reinterpret_cast<const uint8_t *>(&dst_shape), sizeof(int32_t)) != GRAPH_SUCCESS) {
      REPORT_CALL_ERROR("E19999", "Set data to tensor failed");
      GELOGE(INTERNAL_ERROR, "[Set][Data] to tensor failed");
      return nullptr;
    }
  } else {
    tensor->MutableTensorDesc().SetShape(GeShape(std::vector<int64_t>({dim_cnt})));
    unique_ptr<int32_t[]> dst_shape(new (std::nothrow) int32_t[dim_cnt]());
    if (dst_shape == nullptr) {
      REPORT_CALL_ERROR("E19999", "Malloc buffer failed, size:%zu", dim_cnt);
      GELOGE(INTERNAL_ERROR, "[Malloc][Buffer] failed, size:%zu", dim_cnt);
      return nullptr;
    }
    for (int64_t i = 0; i < dim_cnt; ++i) {
      dst_shape[i] = dst_ge_shape.GetDim(static_cast<size_t>(i));
    }
    GE_IF_BOOL_EXEC(
        tensor->SetData(reinterpret_cast<const uint8_t *>(dst_shape.get()), dim_cnt * sizeof(int32_t)) != GRAPH_SUCCESS,
        REPORT_CALL_ERROR("E19999", "Set data to tensor failed");
        GELOGE(INTERNAL_ERROR, "[Set][Data] to tensor failed");
        return nullptr;)
  }
  GELOGD("Create shape input dim [%s]", dst_ge_shape.ToString().c_str());
  return OpDescUtils::CreateConstOp(tensor);
}
void AddTransNodeAttr(const std::string &node_type, const GeTensorDesc &input, const GeTensorDesc &output,
                      OpDescPtr &op_desc) {
  // For format transfer node, the IR definition has src/dst format attrs
  if (node_type == TRANSDATA) {
    GE_IF_BOOL_EXEC(
        !AttrUtils::SetStr(op_desc, FORMAT_TRANSFER_SRC_FORMAT, TypeUtils::FormatToSerialString(input.GetFormat())),
        GELOGW("SetStr FORMAT_TRANSFER_SRC_FORMAT failed");)
    GE_IF_BOOL_EXEC(
        !AttrUtils::SetStr(op_desc, FORMAT_TRANSFER_DST_FORMAT, TypeUtils::FormatToSerialString(output.GetFormat())),
        GELOGW("SetStr FORMAT_TRANSFER_DST_FORMAT failed");)
  }
  // For TransposeD node, the IR definition has perm attrs
  if (node_type == TRANSPOSED) {
    Format src_format = input.GetFormat();
    Format dst_format = output.GetFormat();
    std::vector<int64_t> perm_arg;
    GE_CHK_BOOL_EXEC_WARN(formats::GetPermByForamt(src_format, dst_format, perm_arg) == SUCCESS, return,
                          "Get perm by foramt failed.");
    GE_CHK_BOOL_EXEC_WARN(AttrUtils::SetListInt(op_desc, PERMUTE_ATTR_PERM, perm_arg), return,
                          "SetStr PERMUTE_ATTR_PERM failed")
  }
  // For cast node, the IR definition has src/dst attrs
  if (node_type == CAST) {
    GE_IF_BOOL_EXEC(!AttrUtils::SetInt(op_desc, CAST_ATTR_SRCT, static_cast<int64_t>(input.GetDataType())),
                    GELOGW("SetInt CAST_ATTR_SRCT failed");)
    GE_IF_BOOL_EXEC(!AttrUtils::SetInt(op_desc, CAST_ATTR_DSTT, static_cast<int64_t>(output.GetDataType())),
                    GELOGW("SetInt CAST_ATTR_DSTT failed");)
    GE_IF_BOOL_EXEC(!AttrUtils::SetInt(op_desc, CAST_ATTR_DST_TYPE, static_cast<int64_t>(output.GetDataType())),
                    GELOGW("SetInt CAST_ATTR_DST_TYPE failed");)
    GE_IF_BOOL_EXEC(!AttrUtils::SetBool(op_desc, CAST_ATTR_TRUNCATE, false),
                    GELOGW("SetBool CAST_ATTR_TRUNCATE failed");)
  }
}
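// Creates a trans node (TransData/TransposeD/Cast/Reshape, etc.) named `name` with the given input/output tensor
// descriptions and adds it to the owner graph of `node`; for Reshape, a Const shape node is also created and
// connected to the second input.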
NodePtr CreateTransNode(const std::string &name, const std::string &node_type, const GeTensorDesc &input,
                        const GeTensorDesc &output, NodePtr &node) {
  if (node == nullptr) {
    REPORT_INNER_ERROR("E19999", "Param node is nullptr, trans_name:%s, trans_type:%s, check invalid",
                       name.c_str(), node_type.c_str());
    GELOGE(PARAM_INVALID, "[Check][Param] Param node is nullptr, trans_name:%s, trans_type:%s",
           name.c_str(), node_type.c_str());
    return nullptr;
  }
  auto graph = node->GetOwnerComputeGraph();
  if (graph == nullptr) {
    REPORT_INNER_ERROR("E19999", "Owner graph in node is nullptr, trans_name:%s, trans_type:%s, check invalid",
                       name.c_str(), node_type.c_str());
    GELOGE(PARAM_INVALID, "[Get][OwnerGraph] in node is nullptr, trans_name:%s, trans_type:%s",
           name.c_str(), node_type.c_str());
    return nullptr;
  }
  auto index = TransOpUtil::GetTransOpDataIndex(node_type);
  if (index < 0) {
    REPORT_INNER_ERROR("E19999", "The trans node type %s does not exists, it must be %s",
                       node_type.c_str(), TransOpUtil::TransopMapToString().c_str());
    GELOGE(INTERNAL_ERROR, "[Check][Param] The trans node type %s does not exists", node_type.c_str());
    return nullptr;
  }
  OpDescPtr op_desc = MakeShared<OpDesc>(name, node_type);
  if (op_desc == nullptr) {
    REPORT_CALL_ERROR("E19999", "New OpDesc failed, trans_name:%s, trans_type:%s",
                      name.c_str(), node_type.c_str());
    GELOGE(INTERNAL_ERROR, "[New][OpDesc] failed, trans_name:%s, trans_type:%s",
           name.c_str(), node_type.c_str());
    return nullptr;
  }
  // for data dump
  GE_IF_BOOL_EXEC(
      !AttrUtils::SetListStr(op_desc, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, std::move(std::vector<std::string>())),
      GELOGW("CreateTransNode: SetListStr failed");)
  // Default single input and single output
  auto ret = op_desc->AddInputDesc(input);
  if (ret != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Add input desc into op:%s(%s) failed",
                      op_desc->GetName().c_str(), op_desc->GetType().c_str());
    GELOGE(INTERNAL_ERROR, "[Add][InputDesc] into op:%s(%s) failed",
           op_desc->GetName().c_str(), op_desc->GetType().c_str());
    return nullptr;
  }
  ret = op_desc->AddOutputDesc(output);
  if (ret != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Add output desc into op:%s(%s) failed",
                      op_desc->GetName().c_str(), op_desc->GetType().c_str());
    GELOGE(INTERNAL_ERROR, "[Add][OutputDesc] into op:%s(%s) failed",
           op_desc->GetName().c_str(), op_desc->GetType().c_str());
    return nullptr;
  }
  AddTransNodeAttr(node_type, input, output, op_desc);
  NodePtr shape_node = nullptr;
  if (node_type == RESHAPE) {
    auto shape_desc = CreateTensorShape(output);
    if (shape_desc == nullptr) {
      GELOGE(INTERNAL_ERROR, "[Create][TensorShape] Failed to add shape for reshape %s",
             node->GetName().c_str());
      return nullptr;
    }
    ret = op_desc->AddInputDesc(shape_desc->GetOutputDesc(0));
    if (ret != GRAPH_SUCCESS) {
      REPORT_CALL_ERROR("E19999", "Add input desc into op:%s(%s) failed",
                        op_desc->GetName().c_str(), op_desc->GetType().c_str());
      GELOGE(INTERNAL_ERROR, "[Add][InputDesc] into op:%s(%s) failed",
             op_desc->GetName().c_str(), op_desc->GetType().c_str());
      return nullptr;
    }
    shape_node = graph->AddNode(shape_desc);
    if (shape_node == nullptr) {
      REPORT_CALL_ERROR("E19999", "Add node:%s(%s) to graph:%s failed",
                        shape_desc->GetName().c_str(), shape_desc->GetType().c_str(),
                        graph->GetName().c_str());
      GELOGE(INTERNAL_ERROR, "[Add][Node] %s(%s) to graph:%s failed",
             shape_desc->GetName().c_str(), shape_desc->GetType().c_str(), graph->GetName().c_str());
      return nullptr;
    }
  }
  auto trans_node = graph->AddNode(op_desc);
  if (trans_node == nullptr) {
    REPORT_CALL_ERROR("E19999", "Add node:%s(%s) to graph:%s failed",
                      op_desc->GetName().c_str(), op_desc->GetType().c_str(),
                      graph->GetName().c_str());
    GELOGE(INTERNAL_ERROR, "[Add][Node] %s(%s) to graph:%s failed",
           op_desc->GetName().c_str(), op_desc->GetType().c_str(), graph->GetName().c_str());
    return nullptr;
  }
  if (node_type == RESHAPE) {
    if (GraphUtils::AddEdge(shape_node->GetOutDataAnchor(0), trans_node->GetInDataAnchor(1)) != GRAPH_SUCCESS) {
      REPORT_CALL_ERROR("E19999", "Add edge between op:%s(%s)(out_index:0) and op:%s(%s)(in_index:1) failed",
                        shape_node->GetName().c_str(), shape_node->GetType().c_str(),
                        trans_node->GetName().c_str(), trans_node->GetType().c_str());
      GELOGE(INTERNAL_ERROR, "[Add][Edge] between op:%s(%s)(out_index:0) and op:%s(%s)(in_index:1) failed",
             shape_node->GetName().c_str(), shape_node->GetType().c_str(),
             trans_node->GetName().c_str(), trans_node->GetType().c_str());
      return nullptr;
    }
  }
  return trans_node;
}
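// Re-inserts one trans node on the output side of a variable when recovering its transformation road; note that the
// recorded input/output descriptions are applied in reverse order here.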
Status RecoverOneTransNodeForVar(const std::string &name, const TransNodeInfo &trans_node_info, NodePtr node,
                                 NodePtr &trans_node) {
  GE_CHECK_NOTNULL(node);
  trans_node = CreateTransNode(name, trans_node_info.node_type, trans_node_info.output, trans_node_info.input, node);
  if (trans_node == nullptr) {
    return INTERNAL_ERROR;
  }
  auto ret = GraphUtils::ReplaceNodeDataAnchors(trans_node, node, {}, {0});
  if (ret != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Replace out anchors of node:%s(%s) by node:%s(%s) failed",
                      node->GetName().c_str(), node->GetType().c_str(),
                      trans_node->GetName().c_str(), trans_node->GetType().c_str());
    GELOGE(INTERNAL_ERROR, "[Replace][OutAnchors] of node:%s(%s) by node:%s(%s) failed",
           node->GetName().c_str(), node->GetType().c_str(),
           trans_node->GetName().c_str(), trans_node->GetType().c_str());
    return INTERNAL_ERROR;
  }
  ret = GraphUtils::AddEdge(node->GetOutDataAnchor(0), trans_node->GetInDataAnchor(0));
  if (ret != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Add edge between op:%s(%s)(out_index:0) and op:%s(%s)(in_index:0) failed",
                      node->GetName().c_str(), node->GetType().c_str(),
                      trans_node->GetName().c_str(), trans_node->GetType().c_str());
    GELOGE(INTERNAL_ERROR, "[Add][Edge] between op:%s(%s)(out_index:0) and op:%s(%s)(in_index:0) failed",
           node->GetName().c_str(), node->GetType().c_str(),
           trans_node->GetName().c_str(), trans_node->GetType().c_str());
    return INTERNAL_ERROR;
  }
  ret = GraphUtils::MoveOutCtrlEdges(node, trans_node);
  if (ret != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Move out control edges from node:%s(%s) to node:%s(%s) failed",
                      node->GetName().c_str(), node->GetType().c_str(),
                      trans_node->GetName().c_str(), trans_node->GetType().c_str());
    GELOGE(INTERNAL_ERROR, "[MoveOut][ControlEdges] from node:%s(%s) to node:%s(%s) failed",
           node->GetName().c_str(), node->GetType().c_str(),
           trans_node->GetName().c_str(), trans_node->GetType().c_str());
    return INTERNAL_ERROR;
  }
  return SUCCESS;
}
Status RecoverOneTransNodeForVarRef(const std::string &name, const TransNodeInfo &trans_node_info, NodePtr node,
                                    NodePtr &trans_node) {
  GE_CHECK_NOTNULL(node);
  trans_node = CreateTransNode(name, trans_node_info.node_type, trans_node_info.input, trans_node_info.output, node);
  if (trans_node == nullptr) {
    return INTERNAL_ERROR;
  }
  auto ret = GraphUtils::ReplaceNodeDataAnchors(trans_node, node, {0}, {});
  if (ret != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Replace out anchors of node:%s(%s) by node:%s(%s) failed",
                      node->GetName().c_str(), node->GetType().c_str(),
                      trans_node->GetName().c_str(), trans_node->GetType().c_str());
    GELOGE(INTERNAL_ERROR, "[Replace][OutAnchors] of node:%s(%s) by node:%s(%s) failed",
           node->GetName().c_str(), node->GetType().c_str(),
           trans_node->GetName().c_str(), trans_node->GetType().c_str());
    return INTERNAL_ERROR;
  }
  ret = GraphUtils::AddEdge(trans_node->GetOutDataAnchor(0), node->GetInDataAnchor(0));
  if (ret != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Add edge between op:%s(%s)(out_index:0) and op:%s(%s)(in_index:0) failed",
                      trans_node->GetName().c_str(), trans_node->GetType().c_str(),
                      node->GetName().c_str(), node->GetType().c_str());
    GELOGE(INTERNAL_ERROR, "[Add][Edge] between op:%s(%s)(out_index:0) and op:%s(%s)(in_index:0) failed",
           trans_node->GetName().c_str(), trans_node->GetType().c_str(),
           node->GetName().c_str(), node->GetType().c_str());
    return INTERNAL_ERROR;
  }
  ret = GraphUtils::MoveInCtrlEdges(node, trans_node);
  if (ret != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Move in control edges from node:%s(%s) to node:%s(%s) failed",
                      node->GetName().c_str(), node->GetType().c_str(),
                      trans_node->GetName().c_str(), trans_node->GetType().c_str());
    GELOGE(INTERNAL_ERROR, "[MoveIn][CtrlEdges] from node:%s(%s) to node:%s(%s) failed",
           node->GetName().c_str(), node->GetType().c_str(),
           trans_node->GetName().c_str(), trans_node->GetType().c_str());
    return INTERNAL_ERROR;
  }
  return SUCCESS;
}
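// Syncs the variable's first input/output tensor description (format, data type, shape and their origin
// counterparts) with tensor_desc.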
Status UpdateVarFormats(const NodePtr &var, const GeTensorDesc &tensor_desc) {
  GE_IF_BOOL_EXEC(var == nullptr, GELOGW("node : var is nullptr"); return INTERNAL_ERROR);
  GE_CHECK_NOTNULL(var->GetOpDesc());
  if (var->GetOpDesc()->GetOutputsSize() > 0) {
    auto output_desc = var->GetOpDesc()->GetOutputDesc(0);
    output_desc.SetFormat(tensor_desc.GetFormat());
    output_desc.SetDataType(tensor_desc.GetDataType());
    output_desc.SetShape(tensor_desc.GetShape());
    output_desc.SetOriginFormat(tensor_desc.GetOriginFormat());
    output_desc.SetOriginDataType(tensor_desc.GetOriginDataType());
    output_desc.SetOriginShape(tensor_desc.GetOriginShape());
    GE_IF_BOOL_EXEC(var->GetOpDesc()->UpdateOutputDesc(0, output_desc) != GRAPH_SUCCESS,
                    REPORT_CALL_ERROR("E19999", "Update output desc of node:%s(%s) failed, index:0,",
                                      var->GetName().c_str(), var->GetType().c_str());
                    GELOGE(INTERNAL_ERROR, "[Update][OutputDesc] of node:%s(%s) failed, index:0,",
                           var->GetName().c_str(), var->GetType().c_str());
                    return INTERNAL_ERROR;);
  }
  if (var->GetOpDesc()->GetInputsSize() > 0) {
    auto desc = var->GetOpDesc()->GetInputDesc(0);
    desc.SetFormat(tensor_desc.GetFormat());
    desc.SetDataType(tensor_desc.GetDataType());
    desc.SetShape(tensor_desc.GetShape());
    desc.SetOriginFormat(tensor_desc.GetOriginFormat());
    desc.SetOriginDataType(tensor_desc.GetOriginDataType());
    desc.SetOriginShape(tensor_desc.GetOriginShape());
    GE_IF_BOOL_EXEC(var->GetOpDesc()->UpdateInputDesc(0, desc) != GRAPH_SUCCESS,
                    REPORT_CALL_ERROR("E19999", "Update input desc of node:%s(%s) failed, index:0,",
                                      var->GetName().c_str(), var->GetType().c_str());
                    GELOGE(INTERNAL_ERROR, "[Update][InputDesc] of node:%s(%s) failed, index:0,",
                           var->GetName().c_str(), var->GetType().c_str());
                    return INTERNAL_ERROR;)
  }
  return SUCCESS;
}
Status RecoverTransRoadForVar(const NodePtr &var, const VarTransRoad &road) {
  GE_CHECK_NOTNULL(var);
  static std::atomic_int index(0);
  NodePtr last_node = var;
  for (auto iter = road.rbegin(); iter != road.rend(); ++iter) {
    auto trans_name = var->GetName() + "_trans_" + std::to_string(index++);
    auto ret = RecoverOneTransNodeForVar(trans_name, *iter, last_node, last_node);
    if (ret != SUCCESS) {
      REPORT_CALL_ERROR("E19999", "Failed to recover trans node for variable %s, index %s, type %s",
                        var->GetName().c_str(), std::to_string(index).c_str(), iter->node_type.c_str());
      GELOGE(INTERNAL_ERROR, "[Recover][TransNode] for variable %s, index %s, type %s", var->GetName().c_str(),
             std::to_string(index).c_str(), iter->node_type.c_str());
      return INTERNAL_ERROR;
    }
    // set stream_label
    OpDescPtr var_desc = var->GetOpDesc();
    GE_CHECK_NOTNULL(var_desc);
    std::string stream_label;
    (void)AttrUtils::GetStr(var_desc, ATTR_NAME_STREAM_LABEL, stream_label);
    if (!stream_label.empty()) {
      auto status = SetStreamLabel(last_node, stream_label);
      if (status != ge::SUCCESS) {
        REPORT_CALL_ERROR("E19999", "Set stream_label:%s to op:%s(%s) failed",
                          stream_label.c_str(), last_node->GetName().c_str(), last_node->GetType().c_str());
        GELOGE(status, "[Set][StreamLabel] %s to op:%s(%s) failed.",
               stream_label.c_str(), last_node->GetName().c_str(), last_node->GetType().c_str());
        return status;
      }
    }
    GE_CHK_BOOL_EXEC((ge::AttrUtils::SetBool(last_node->GetOpDesc(), ge::ATTR_INSERTED_BY_GE, true)),
                     REPORT_CALL_ERROR("E19999", "Set Attr:%s to node:%s(%s) failed",
                                       ge::ATTR_INSERTED_BY_GE.c_str(),
                                       last_node->GetName().c_str(), last_node->GetType().c_str());
                     return INTERNAL_ERROR,
                     "[Set][Attr] %s to node:%s(%s) failed", ge::ATTR_INSERTED_BY_GE.c_str(),
                     last_node->GetName().c_str(), last_node->GetType().c_str());
    GELOGD("Recover trans node %s type %s success", trans_name.c_str(), iter->node_type.c_str());
  }
  if (road.empty()) {
    return SUCCESS;
  }
  return UpdateVarFormats(var, road.rbegin()->output);
}
Status RecoverTransRoadForVarRef(const std::set<NodePtr> &nodes, const VarTransRoad &road) {
  for (auto &var : nodes) {
    GE_CHECK_NOTNULL(var);
    static std::atomic_int index(0);
    NodePtr last_node = var;
    GELOGI("Recover trans nodes for variable ref %s", var->GetName().c_str());
    for (auto iter = road.rbegin(); iter != road.rend(); ++iter) {
      auto trans_name = var->GetName() + "_trans_" + std::to_string(index++);
      auto ret = RecoverOneTransNodeForVarRef(trans_name, *iter, last_node, last_node);
      if (ret != SUCCESS) {
        REPORT_CALL_ERROR("E19999", "Failed to recover trans node for variable %s, index %s, type %s",
                          var->GetName().c_str(), std::to_string(index).c_str(), iter->node_type.c_str());
        GELOGE(INTERNAL_ERROR, "[Recover][TransNode] for variable %s failed, index %s, type %s",
               var->GetName().c_str(), std::to_string(index).c_str(), iter->node_type.c_str());
        return INTERNAL_ERROR;
      }
      // set stream_label
      OpDescPtr var_desc = var->GetOpDesc();
      GE_CHECK_NOTNULL(var_desc);
      std::string stream_label;
      (void)AttrUtils::GetStr(var_desc, ATTR_NAME_STREAM_LABEL, stream_label);
      if (!stream_label.empty()) {
        auto status = SetStreamLabel(last_node, stream_label);
        if (status != ge::SUCCESS) {
          REPORT_CALL_ERROR("E19999", "Set stream_label:%s to op:%s(%s) failed",
                            stream_label.c_str(), last_node->GetName().c_str(), last_node->GetType().c_str());
          GELOGE(status, "[Set][StreamLabel] %s to op:%s(%s) failed.",
                 stream_label.c_str(), last_node->GetName().c_str(), last_node->GetType().c_str());
          return status;
        }
      }
      GE_CHK_BOOL_EXEC((ge::AttrUtils::SetBool(last_node->GetOpDesc(), ge::ATTR_INSERTED_BY_GE, true)),
                       REPORT_CALL_ERROR("E19999", "Set Attr:%s of node:%s(%s) failed",
                                         ge::ATTR_INSERTED_BY_GE.c_str(),
                                         last_node->GetName().c_str(), last_node->GetType().c_str());
                       return INTERNAL_ERROR,
                       "[Set][Attr] %s of node:%s(%s) failed", ge::ATTR_INSERTED_BY_GE.c_str(),
                       last_node->GetName().c_str(), last_node->GetType().c_str());
    }
    if (!(road.empty()) && (UpdateVarFormats(var, road.rbegin()->output) != SUCCESS)) {
      return INTERNAL_ERROR;
    }
  }
  return SUCCESS;
}
using VarNamesToRefs = std::map<std::string, std::set<NodePtr>>;
VarNamesToRefs CollectVarNamesToRefs(const ComputeGraphPtr &graph) {
  VarNamesToRefs names_to_refs;
  std::string var_name;
  if (graph == nullptr) {
    REPORT_INNER_ERROR("E19999", "Param graph is nullptr, check invalid");
    GELOGE(PARAM_INVALID, "[Check][Param] graph is nullptr.");
    return names_to_refs;
  }
  for (auto &node : graph->GetAllNodes()) {
    if (node->GetType() != VARIABLE) {
      continue;
    }
    if (AttrUtils::GetStr(node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, var_name)) {
      (void)names_to_refs[var_name].insert(node);
    }
  }
  return names_to_refs;
}
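// Computes the NC1HWC0 shape for an NCHW or NHWC source shape; other source formats are left untouched and SUCCESS
// is returned without filling dst_shape.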
Status TransferShape2NC1HWC0(Format src_format, const std::vector<int64_t> &src_shape, DataType dt, Format dst_format,
                             std::vector<int64_t> &dst_shape) {
  if (src_format == FORMAT_NCHW) {
    formats::FormatTransferNchwNc1hwc0 transfer;
    if (transfer.TransShape(src_format, src_shape, dt, dst_format, dst_shape) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "[Trans][Shape] failed");
      return FAILED;
    }
  } else if (src_format == FORMAT_NHWC) {
    formats::FormatTransferNhwcNc1hwc0 transfer;
    if (transfer.TransShape(src_format, src_shape, dt, dst_format, dst_shape) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "[Trans][Shape] failed");
      return FAILED;
    }
  }
  return SUCCESS;
}
Status ModifyInputFormatAndShape(NodePtr &node_ptr) {
  GE_CHECK_NOTNULL(node_ptr);
  auto op_desc = node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  const GeTensorDescPtr &input = op_desc->MutableInputDesc(0);
  GE_CHECK_NOTNULL(input);
  ge::Format old_format = input->GetFormat();
  std::vector<int64_t> old_shape = input->GetShape().GetDims();
  ge::DataType dt = input->GetDataType();
  std::vector<int64_t> dst_shape_dims;
  if (TransferShape2NC1HWC0(old_format, old_shape, dt, FORMAT_NC1HWC0, dst_shape_dims) != SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Transfer shape to NC1HWC0 failed, op:%s(%s),",
                      op_desc->GetName().c_str(), op_desc->GetType().c_str());
    GELOGE(INTERNAL_ERROR, "[Transfer][Shape] to NC1HWC0 failed, op:%s(%s),",
           op_desc->GetName().c_str(), op_desc->GetType().c_str());
    return FAILED;
  }
  input->SetFormat(FORMAT_NC1HWC0);
  input->SetShape(ge::GeShape(dst_shape_dims));
  auto output = op_desc->MutableOutputDesc(0);
  GE_CHECK_NOTNULL(output);
  output->SetFormat(FORMAT_NC1HWC0);
  output->SetShape(ge::GeShape(dst_shape_dims));
  int64_t size = 0;
  graphStatus graph_status = TensorUtils::GetTensorMemorySizeInBytes(*output, size);
  if (graph_status != ge::GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Get output tensor size failed, op:%s(%s), index:0",
                      op_desc->GetName().c_str(), op_desc->GetType().c_str());
    GELOGE(graph_status, "[Get][TensorSize] In Bytes failed, op:%s(%s), index:0",
           op_desc->GetName().c_str(), op_desc->GetType().c_str());
    return FAILED;
  }
  ge::TensorUtils::SetSize(*output, size);
  ge::TensorUtils::SetSize(*input, size);
  return SUCCESS;
}
Status ModifyFormatAndShapeForSingleTensor(const GeTensorDescPtr &input_output) {
  GE_CHECK_NOTNULL(input_output);
  ge::Format old_format = input_output->GetFormat();
  std::vector<int64_t> old_shape = input_output->GetShape().GetDims();
  ge::DataType dt = input_output->GetDataType();
  std::vector<int64_t> dst_shape_dims;
  if (TransferShape2NC1HWC0(old_format, old_shape, dt, FORMAT_NC1HWC0, dst_shape_dims) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "[Trans][Shape] to NC1HWC0 failed");
    return FAILED;
  }
  input_output->SetFormat(FORMAT_NC1HWC0);
  input_output->SetShape(ge::GeShape(dst_shape_dims));
  return SUCCESS;
}
Status ModifyDataNetOutputFormatAndShape(OpDescPtr &op_desc, uint32_t index, Format storage_format,
                                         vector<int64_t> &dst_shape_dims) {
  GE_CHECK_NOTNULL(op_desc);
  const GeTensorDescPtr &input = op_desc->MutableInputDesc(index);
  GE_CHECK_NOTNULL(input);
  ge::Format old_format = input->GetFormat();
  std::vector<int64_t> old_shape = input->GetShape().GetDims();
  input->SetShape(ge::GeShape(dst_shape_dims));
  input->SetFormat(storage_format);
  auto output = op_desc->MutableOutputDesc(index);
  GE_CHECK_NOTNULL(output);
  output->SetShape(ge::GeShape(dst_shape_dims));
  output->SetFormat(storage_format);
  if (!output->MutableShape().IsUnknownShape()) {
    int64_t size = 0;
    graphStatus graph_status = TensorUtils::GetTensorMemorySizeInBytes(*output, size);
    if (graph_status != ge::GRAPH_SUCCESS) {
      REPORT_CALL_ERROR("E19999", "Get output tensor size failed, op:%s(%s), index:%u",
                        op_desc->GetName().c_str(), op_desc->GetType().c_str(), index);
      GELOGE(graph_status, "[Get][TensorSize] In Bytes failed, op:%s(%s), index:%u",
             op_desc->GetName().c_str(), op_desc->GetType().c_str(), index);
      return FAILED;
    }
    ge::TensorUtils::SetSize(*input, size);
    ge::TensorUtils::SetSize(*output, size);
    GELOGI("Modify Data NetOutput format and shape success, node:%s, index:%d, old_shape:%s, old_Format:%s, "
           "new_shape:%s, new_format:%s, new_size:%lu",
           op_desc->GetName().c_str(), index, formats::JoinToString(old_shape).c_str(),
           ge::TypeUtils::FormatToSerialString(old_format).c_str(), formats::JoinToString(dst_shape_dims).c_str(),
           ge::TypeUtils::FormatToSerialString(storage_format).c_str(), size);
  }
  return SUCCESS;
}
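// A Data node carries the "mbatch-switch-name" attribute in dynamic batch scenes; look up the referenced
// SwitchN/Case node among its data outputs and report the input index it connects to.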
Status CheckIfDynamicBatchScene(NodePtr &data_node, bool &is_dynamic_batch, NodePtr &mbatch_node, int32_t &index) {
  is_dynamic_batch = false;
  std::string related_node_name;
  if (AttrUtils::GetStr(data_node->GetOpDesc(), kMbatchSwitchnName, related_node_name)) {
    if (related_node_name.empty()) {
      REPORT_INNER_ERROR("E19999", "The data node %s has switchn node flag, but the value is empty",
                         data_node->GetName().c_str());
      GELOGE(INTERNAL_ERROR, "[Check][Param] The data node %s has switchn node flag, but the value is empty",
             data_node->GetName().c_str());
      return INTERNAL_ERROR;
    }
    auto out_data_nodes_anchors = data_node->GetOutDataNodesAndAnchors();
    for (const auto &out_data_node_anchor : out_data_nodes_anchors) {
      if (out_data_node_anchor.first->GetName() == related_node_name) {
        mbatch_node = out_data_node_anchor.first;
        index = out_data_node_anchor.second->GetIdx();
        break;
      }
    }
    if (mbatch_node == nullptr) {
      REPORT_INNER_ERROR("E19999", "The data node %s has switchn node %s, but can not find it on the graph",
                         data_node->GetName().c_str(), related_node_name.c_str());
      GELOGE(INTERNAL_ERROR, "[Check][Param] The data node %s has switchn node %s, but can not find it on the graph",
             data_node->GetName().c_str(), related_node_name.c_str());
      return INTERNAL_ERROR;
    }
    is_dynamic_batch = true;
  }
  return SUCCESS;
}
bool CheckOpType(const NodePtr &node, const std::string type) {
  if (node->GetType() == type) {
    return true;
  }
  return false;
}
Status CheckIfNeedSetNdFormat(const NodePtr &node_ptr) {
  auto op = node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(op);
  auto inputDescsPtr = op->GetAllInputsDescPtr();
  auto outputDescsPtr = op->GetAllOutputsDescPtr();
  ge::Format format = ge::FORMAT_ND;
  // if user set shape larger than 4, inferformat may set NCHW or NHWC, GE should set ND before FE
  // process, otherwise fe will insert transdata.
  for (auto &inputDescPtr : inputDescsPtr) {
    GE_CHECK_NOTNULL(inputDescPtr);
    if ((inputDescPtr->GetShape().GetDims().size() > ge::DIM_DEFAULT_SIZE) &&
        ((inputDescPtr->GetFormat() == ge::FORMAT_NCHW) || (inputDescPtr->GetFormat() == ge::FORMAT_NHWC))) {
      GELOGI("The node inputdesc [%s] format need to be set ND", op->GetName().c_str());
      inputDescPtr->SetFormat(format);
      inputDescPtr->SetOriginFormat(format);
    }
  }
  for (auto &outputDescPtr : outputDescsPtr) {
    GE_CHECK_NOTNULL(outputDescPtr);
    if ((outputDescPtr->GetShape().GetDims().size() > ge::DIM_DEFAULT_SIZE) &&
        ((outputDescPtr->GetFormat() == ge::FORMAT_NCHW) || (outputDescPtr->GetFormat() == ge::FORMAT_NHWC))) {
      GELOGI("The node outputdesc [%s] format need to be set ND", op->GetName().c_str());
      outputDescPtr->SetFormat(format);
      outputDescPtr->SetOriginFormat(format);
    }
  }
  return SUCCESS;
}
// A new function ending in 'DynShape' has been added for the dynamic shape processing.
// In the dynamic shape process, transnode insertion by FE is advanced to the stage of whole
// graph optimization, GE only sets the final data_type/format/shape information for variable,
// data and netoutput, and no longer inserts the transnode.
Status ProcessInputDtDynShape(NodePtr &node_ptr, NodePtr &switchn_node, DataType &dt_set) {
  GE_CHECK_NOTNULL(node_ptr);
  auto op_desc = node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  const GeTensorDescPtr &input = op_desc->MutableInputDesc(0);
  GE_CHECK_NOTNULL(input);
  ge::DataType src_dtype = input->GetDataType();
  if (src_dtype == dt_set) {
    GELOGI("The node name, %s dtype is fp16", node_ptr->GetName().c_str());
    return SUCCESS;
  }
  input->SetDataType(dt_set);
  const GeTensorDescPtr &output = op_desc->MutableOutputDesc(0);
  GE_CHECK_NOTNULL(output);
  output->SetDataType(dt_set);
  GeShape shape = input->GetShape();
  if (!shape.IsUnknownShape()) {
    int64_t input_shape_size = 0;
    int64_t output_shape_size = 0;
    ge::graphStatus input_graph_status = ge::TensorUtils::GetTensorSizeInBytes(*input, input_shape_size);
    ge::graphStatus output_graph_status = ge::TensorUtils::GetTensorMemorySizeInBytes(*input, output_shape_size);
    if (input_graph_status != ge::GRAPH_SUCCESS && output_graph_status != ge::GRAPH_SUCCESS) {
      REPORT_CALL_ERROR("E19999", "Get input tensor size failed, op:%s(%s), index:0",
                        op_desc->GetName().c_str(), op_desc->GetType().c_str());
      GELOGE(GRAPH_FAILED, "[Process][InputOp] Get tensor size of op [%s] failed!", node_ptr->GetName().c_str());
      return FAILED;
    }
    ge::TensorUtils::SetSize(*input, input_shape_size);
    ge::TensorUtils::SetSize(*output, output_shape_size);
    GELOGI("[Process][InputDynShape] Set input and output size of node [%s] success.", node_ptr->GetName().c_str());
  }
  return SUCCESS;
}
Status UpdateInputOutputDataType(NodePtr &mbatch_node, DataType &dt_set, int32_t index) {
  auto mbatch_desc = mbatch_node->GetOpDesc();
  GE_CHECK_NOTNULL(mbatch_desc);
  auto mbatch_input = mbatch_desc->MutableInputDesc(index);
  GE_CHECK_NOTNULL(mbatch_input);
  mbatch_input->SetDataType(dt_set);
  if (mbatch_node->GetType() == SWITCHN) {
    for (uint32_t i = 0; i < mbatch_node->GetAllOutDataAnchorsSize(); ++i) {
      const GeTensorDescPtr &mbatch_output = mbatch_desc->MutableOutputDesc(i);
      GE_CHECK_NOTNULL(mbatch_output);
      mbatch_output->SetDataType(dt_set);
    }
  }
  GELOGD("Update input and output data type of node[name: %s, type: %s, input index: %d] to %s.",
         mbatch_node->GetName().c_str(), mbatch_node->GetType().c_str(), index,
         TypeUtils::DataTypeToSerialString(dt_set).c_str());
  return SUCCESS;
}
Status UpdateSubgraphDataOfCase(NodePtr &mbatch_node, DataType &dt_set, int32_t index) {
  if (mbatch_node->GetType() != CASE) {
    return SUCCESS;
  }
  std::vector<ComputeGraphPtr> subgraphs;
  if (NodeUtils::GetSubgraphs(mbatch_node, subgraphs) != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Get subgraphs of node %s failed", mbatch_node->GetName().c_str());
    GELOGE(FAILED, "[Check][Param] Get subgraphs of node %s failed", mbatch_node->GetName().c_str());
    return FAILED;
  }
  for (const auto &subgraph : subgraphs) {
    GE_CHECK_NOTNULL(subgraph);
    for (auto &sub_node : subgraph->GetDirectNode()) {
      GE_CHECK_NOTNULL(sub_node);
      if (sub_node->GetType() != DATA) {
        continue;
      }
      auto data_desc = sub_node->GetOpDesc();
      GE_CHECK_NOTNULL(data_desc);
      int32_t parent_node_index = 0;
      if (!AttrUtils::GetInt(data_desc, ATTR_NAME_PARENT_NODE_INDEX, parent_node_index) ||
          (parent_node_index != index)) {
        continue;
      }
      auto data_input = data_desc->MutableInputDesc(0);
      GE_CHECK_NOTNULL(data_input);
      data_input->SetDataType(dt_set);
      auto data_output = data_desc->MutableOutputDesc(0);
      GE_CHECK_NOTNULL(data_output);
      data_output->SetDataType(dt_set);
      GELOGD("Update input and output data type of node[name: %s, type: %s, parent_node_index: %d] in subgraph %s "
             "to %s.", data_desc->GetName().c_str(), data_desc->GetType().c_str(), parent_node_index,
             subgraph->GetName().c_str(), TypeUtils::DataTypeToSerialString(dt_set).c_str());
    }
  }
  return SUCCESS;
}
Status ProcessMbatchScene(NodePtr &mbatch_node, DataType &dt_set, int32_t index) {
  GELOGI("The node [%s] dtype set fp16.", mbatch_node->GetName().c_str());
  if (UpdateInputOutputDataType(mbatch_node, dt_set, index) != SUCCESS) {
    GELOGE(FAILED, "[Update][InputOutputDataType] of node[name: %s, type: %s] to %s failed.",
           mbatch_node->GetName().c_str(), mbatch_node->GetType().c_str(),
           TypeUtils::DataTypeToSerialString(dt_set).c_str());
    return FAILED;
  }
  if (UpdateSubgraphDataOfCase(mbatch_node, dt_set, index) != SUCCESS) {
    GELOGE(FAILED, "[Update][SubgraphDataOfCase] node[parent_node_index:%d] in subgraphs of "
           "node[name:%s, type:%s] to %s failed.", index, mbatch_node->GetName().c_str(),
           mbatch_node->GetType().c_str(), TypeUtils::DataTypeToSerialString(dt_set).c_str());
    return FAILED;
  }
  return SUCCESS;
}
Status ProcessInputNC1HWC0DynShape(NodePtr &node_ptr, bool &is_dynamic_batch, NodePtr &switchn_node) {
  GE_CHECK_NOTNULL(node_ptr);
  auto op_desc = node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  const GeTensorDescPtr &input = op_desc->MutableInputDesc(0);
  GE_CHECK_NOTNULL(input);
  ge::Format old_format = input->GetFormat();
  ge::GeShape old_shape = input->GetShape();
  bool support = ((old_format == FORMAT_NC1HWC0) || (old_format == FORMAT_NCHW) || (old_format == FORMAT_NHWC));
  if (!support) {
    REPORT_INNER_ERROR("E19999",
                       "The format:%s of op:%s(%s) is unsupported, only support FORMAT_NC1HWC0,FORMAT_NCHW,FORMAT_NHWC",
                       TypeUtils::FormatToSerialString(old_format).c_str(),
                       op_desc->GetName().c_str(), op_desc->GetType().c_str());
    GELOGE(INTERNAL_ERROR, "[Check][Param] The format [%s] is unsupported, op:%s",
           TypeUtils::FormatToSerialString(old_format).c_str(), op_desc->GetName().c_str());
    return FAILED;
  }
  if (ModifyInputFormatAndShape(node_ptr) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "[Modify][InputFormatAndShape] failed, op:%s(%s)",
           op_desc->GetName().c_str(), op_desc->GetType().c_str());
    return FAILED;
  }
  if (is_dynamic_batch) {
    auto switchn_op_desc = switchn_node->GetOpDesc();
    GE_CHECK_NOTNULL(switchn_op_desc);
    const GeTensorDescPtr &switchn_input = switchn_op_desc->MutableInputDesc(0);
    if (ModifyFormatAndShapeForSingleTensor(switchn_input) != SUCCESS) {
      REPORT_CALL_ERROR("E19999", "Modify format and shape of input:0 in op:%s(%s) failed",
                        switchn_op_desc->GetName().c_str(), switchn_op_desc->GetType().c_str());
      GELOGE(INTERNAL_ERROR, "[Modify][FormatAndShape] of input:0 in op:%s(%s) failed",
             switchn_op_desc->GetName().c_str(), switchn_op_desc->GetType().c_str());
      return FAILED;
    }
    for (uint32_t i = 0; i < switchn_node->GetAllOutDataAnchorsSize(); ++i) {
      auto switchn_output = switchn_op_desc->MutableOutputDesc(i);
      GE_CHECK_NOTNULL(switchn_output);
      old_format = switchn_output->GetFormat();
      old_shape = switchn_output->GetShape();
      if (ModifyFormatAndShapeForSingleTensor(switchn_output) != SUCCESS) {
        REPORT_CALL_ERROR("E19999", "Modify format and shape of output:%u in op:%s(%s) failed", i,
                          switchn_op_desc->GetName().c_str(), switchn_op_desc->GetType().c_str());
        GELOGE(INTERNAL_ERROR, "[Modify][FormatAndShape] of output:%u in op:%s(%s) failed", i,
               switchn_op_desc->GetName().c_str(), switchn_op_desc->GetType().c_str());
        return FAILED;
      }
    }
  }
  return SUCCESS;
}
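// Applies the user-defined data type (ATTR_ATC_USER_DEFINE_DATATYPE) and, if requested, the NC1HWC0 format
// (ATTR_ATC_USER_DEFINE_FORMAT) to a Data node, covering the related mbatch node in dynamic batch scenes.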
Status ProcessDataNodeDynShape(NodePtr &node_ptr) {
  auto op_desc = node_ptr->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  string set_dt_str;
  if (!ge::AttrUtils::GetStr(node_ptr->GetOpDesc(), ATTR_ATC_USER_DEFINE_DATATYPE, set_dt_str)) {
    return SUCCESS;
  }
  DataType dt_set = TypeUtils::SerialStringToDataType(set_dt_str);
  GELOGI("input_fp16 is found, the node name is %s.", node_ptr->GetName().c_str());
  bool is_dynamic_batch = false;
  NodePtr mbatch_node = nullptr;
  int32_t index = 0;
  if (CheckIfDynamicBatchScene(node_ptr, is_dynamic_batch, mbatch_node, index)) {
    GELOGE(INTERNAL_ERROR, "[Call][CheckIfDynamicBatchScene] failed, op:%s", op_desc->GetName().c_str());
    return FAILED;
  }
  if (ProcessInputDtDynShape(node_ptr, mbatch_node, dt_set) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "[Process][InputDtDynShape] ProcessInputFP16 failed, op:%s", op_desc->GetName().c_str());
    return FAILED;
  }
  if (is_dynamic_batch && ProcessMbatchScene(mbatch_node, dt_set, index) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "[Process][MbatchScene] failed");
    return FAILED;
  }
  // check if need to set format
  string set_format;
  bool ret = ge::AttrUtils::GetStr(node_ptr->GetOpDesc(), ATTR_ATC_USER_DEFINE_FORMAT, set_format);
  if (ret && (!set_format.empty()) && TypeUtils::SerialStringToFormat(set_format) == FORMAT_NC1HWC0) {
    GELOGI("The format of node [%s] should be set NC1HWC0.", node_ptr->GetName().c_str());
    if (ProcessInputNC1HWC0DynShape(node_ptr, is_dynamic_batch, mbatch_node) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "[Process][InputNC1HWC0] failed, op:%s", node_ptr->GetName().c_str());
      return FAILED;
    }
  }
  return SUCCESS;
}
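// Reads ATTR_NAME_STORAGE_FORMAT / ATTR_NAME_STORAGE_SHAPE from the tensor description; storage_format stays
// FORMAT_RESERVED when no attribute is set or nothing would change.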
  846. Status GetStorageFormatAndShape(OpDescPtr &op_desc, const GeTensorDescPtr &tensor_desc_ptr,
  847. Format &storage_format, vector<int64_t> &dst_shape_dims) {
  848. GE_CHECK_NOTNULL(op_desc);
  849. GE_CHECK_NOTNULL(tensor_desc_ptr);
  850. storage_format = FORMAT_RESERVED;
  851. int64_t format = FORMAT_RESERVED;
  852. dst_shape_dims.clear();
  853. if (ge::AttrUtils::GetInt(*tensor_desc_ptr, ATTR_NAME_STORAGE_FORMAT, format)) {
  854. storage_format = static_cast<Format>(format);
  855. vector<int32_t> storage_shape;
  856. if (ge::AttrUtils::GetListInt(*tensor_desc_ptr, ATTR_NAME_STORAGE_SHAPE, storage_shape)) {
  857. for (auto dim : storage_shape) {
  858. dst_shape_dims.push_back(static_cast<int64_t>(dim));
  859. }
  860. GELOGI("Update node by storage format, node: [%s], storage_format: [%s], storage_shape:[%s]",
  861. op_desc->GetName().c_str(), TypeUtils::FormatToSerialString(storage_format).c_str(),
  862. formats::JoinToString(storage_shape).c_str());
  863. } else {
  864. ErrorManager::GetInstance().ATCReportErrMessage(
  865. "15003", {"opname", "format"},
  866. {op_desc->GetName(), TypeUtils::FormatToSerialString(storage_format)});
  867. GELOGE(PARAM_INVALID, "[Check][Param] Update node by storage format failed, storage_shape not set. "
  868. "node:[%s], storage_format [%s]",
  869. op_desc->GetName().c_str(), TypeUtils::FormatToSerialString(storage_format).c_str());
  870. return FAILED;
  871. }
  872. ge::Format old_format = tensor_desc_ptr->GetFormat();
  873. auto old_shape = tensor_desc_ptr->GetShape().GetDims();
  874. if (old_format == storage_format && old_shape == dst_shape_dims) {
  875. GELOGI("Update node by storage format, not changed.");
  876. storage_format = FORMAT_RESERVED;
  877. return SUCCESS;
  878. }
  879. }
  880. return SUCCESS;
  881. }
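// Converts one NetOutput input to DT_FLOAT16 / FORMAT_NC1HWC0. When the source node is a Merge
// (dynamic batch), its input and output descs are converted as well.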
  882. Status ProcessNetoutputNodeFp16Nc1hwc0DynShape(GeTensorDesc &src_desc, GeTensorDescPtr &net_output_input_desc,
  883. NodePtr &node) {
  884. bool is_dynamic = CheckOpType(node, MERGE);
  885. auto src_op_desc = node->GetOpDesc();
  886. GE_CHECK_NOTNULL(src_op_desc);
  887. ge::GeShape src_shape = src_desc.GetShape();
  888. ge::Format src_format = src_desc.GetFormat();
  889. net_output_input_desc->SetDataType(DT_FLOAT16);
  890. if (is_dynamic) {
  891. auto merge_output = src_op_desc->MutableOutputDesc(0);
  892. GE_CHECK_NOTNULL(merge_output);
  893. merge_output->SetDataType(DT_FLOAT16);
  894. for (uint32_t i = 0; i < node->GetAllInDataAnchorsSize(); ++i) {
  895. auto merge_input = src_op_desc->MutableInputDesc(i);
  896. GE_CHECK_NOTNULL(merge_input);
  897. merge_input->SetDataType(DT_FLOAT16);
  898. }
  899. }
  900. std::vector<int64_t> dst_shape_dims;
  901. std::vector<int64_t> src_shape_dims = src_shape.GetDims();
  902. if (TransferShape2NC1HWC0(src_format, src_shape_dims, DT_FLOAT16, FORMAT_NC1HWC0, dst_shape_dims) != SUCCESS) {
  903. REPORT_CALL_ERROR("E19999", "Transfer output:0 shape of op:%s(%s) to NC1HWC0 format failed, shape:%s, format:%s",
  904. src_op_desc->GetName().c_str(), src_op_desc->GetType().c_str(),
  905. src_shape.ToString().c_str(), TypeUtils::FormatToSerialString(src_format).c_str());
  906. GELOGE(INTERNAL_ERROR, "[Trans][Shape] of op:%s(%s) to NC1HWC0 format failed, shape:%s, format:%s",
  907. src_op_desc->GetName().c_str(), src_op_desc->GetType().c_str(),
  908. src_shape.ToString().c_str(), TypeUtils::FormatToSerialString(src_format).c_str());
  909. return FAILED;
  910. }
  911. ge::GeShape dst_shape(dst_shape_dims);
  912. net_output_input_desc->SetFormat(FORMAT_NC1HWC0);
  913. net_output_input_desc->SetShape(dst_shape);
  914. if (is_dynamic) {
  915. auto merge_out = src_op_desc->MutableOutputDesc(0);
  916. GE_CHECK_NOTNULL(merge_out);
  917. if (ModifyFormatAndShapeForSingleTensor(merge_out) != SUCCESS) {
  918. REPORT_CALL_ERROR("E19999", "Modify format and shape of output:0 in op:%s(%s) failed",
  919. src_op_desc->GetName().c_str(), src_op_desc->GetType().c_str());
  920. GELOGE(INTERNAL_ERROR, "[Modify][FormatAndShape] of output:0 in op:%s(%s) failed",
  921. src_op_desc->GetName().c_str(), src_op_desc->GetType().c_str());
  922. return FAILED;
  923. }
  924. for (uint32_t i = 0; i < node->GetAllInDataAnchorsSize(); ++i) {
  925. auto merge_in = src_op_desc->MutableInputDesc(i);
  926. GE_CHECK_NOTNULL(merge_in);
  927. if (ModifyFormatAndShapeForSingleTensor(merge_in) != SUCCESS) {
  928. REPORT_CALL_ERROR("E19999", "Modify format and shape of input:%u in op:%s(%s) failed", i,
  929. src_op_desc->GetName().c_str(), src_op_desc->GetType().c_str());
  930. GELOGE(INTERNAL_ERROR, "[Modify][FormatAndShape] of input:%u in op:%s(%s) failed", i,
  931. src_op_desc->GetName().c_str(), src_op_desc->GetType().c_str());
  932. return FAILED;
  933. }
  934. }
  935. }
  936. return SUCCESS;
  937. }
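// Parses the "index:datatype" entries of ATTR_ATC_USER_DEFINE_DATATYPE on the NetOutput desc and
// returns true (with dt filled in) when an entry matches the given output index.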
  938. bool NeedUpdateDtByOutputTypeParm(OpDescPtr &netout_desc, uint32_t &index, ge::DataType &dt) {
  939. GE_CHECK_NOTNULL(netout_desc);
  940. vector<string> output_dt_str;
  941. if (ge::AttrUtils::GetListStr(netout_desc, ATTR_ATC_USER_DEFINE_DATATYPE, output_dt_str)) {
  942. for (auto dt_str : output_dt_str) {
  943. vector<string> dt_str_split = StringUtils::Split(dt_str, ':');
  944. if (dt_str_split.size() == kUserDefinedElementCount) {
  945. if (dt_str_split[0] == to_string(index)) {
  946. dt = TypeUtils::SerialStringToDataType(dt_str_split[1]);
  947. GELOGI("Find netoutput node output %u datatype should be set %s .", index,
  948. TypeUtils::DataTypeToSerialString(dt).c_str());
  949. return true;
  950. }
  951. }
  952. }
  953. }
  954. return false;
  955. }
  956. bool NeedUpdateFormatByOutputTypeParm(OpDescPtr &netout_desc, uint32_t &index) {
  957. GE_CHECK_NOTNULL(netout_desc);
  958. vector<string> output_format_str;
  959. if (ge::AttrUtils::GetListStr(netout_desc, ATTR_ATC_USER_DEFINE_FORMAT, output_format_str)) {
  960. for (auto format_str : output_format_str) {
  961. vector<string> format_str_split = StringUtils::Split(format_str, ':');
  962. if (format_str_split.size() == kUserDefinedElementCount) {
  963. if (format_str_split[0] == to_string(index)) {
  964. GELOGI("Find netoutput node output %u format should be set NC1HWC0.", index);
  965. return true;
  966. }
  967. }
  968. }
  969. }
  970. return false;
  971. }
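// Walks every input of the NetOutput node and applies the user-defined output data type and/or
// NC1HWC0 layout, rejecting source formats other than NCHW / NHWC / NC1HWC0.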
  972. Status ProcessNetoutputNodeDynShape(NodePtr &node) {
  973. auto op_desc = node->GetOpDesc();
  974. GE_CHECK_NOTNULL(op_desc);
  975. ge::DataType output_data_type = ge::DT_FLOAT;
  976. for (const auto &in_anchor : node->GetAllInDataAnchors()) {
  977. auto index = static_cast<uint32_t>(in_anchor->GetIdx());
  978. auto peer_out = in_anchor->GetPeerOutAnchor();
  979. GE_CHECK_NOTNULL(peer_out);
  980. auto src_node = peer_out->GetOwnerNode();
  981. GE_CHECK_NOTNULL(src_node);
  982. bool is_dynamic = CheckOpType(src_node, MERGE);
  983. OpDescPtr src_op_desc = src_node->GetOpDesc();
  984. GE_CHECK_NOTNULL(src_op_desc);
  985. auto net_output_input_desc = op_desc->MutableInputDesc(index);
  986. GE_CHECK_NOTNULL(net_output_input_desc);
  987. ge::GeShape old_shape = net_output_input_desc->GetShape();
  988. ge::Format old_format = net_output_input_desc->GetFormat();
  989. ge::DataType old_dtype = net_output_input_desc->GetDataType();
  990. // Update datatype
  991. if (NeedUpdateDtByOutputTypeParm(op_desc, index, output_data_type)) {
  992. GELOGI("Enter into process output_type schedule");
  993. net_output_input_desc->SetDataType(output_data_type);
  994. if (is_dynamic) {
  995. auto merge_output = src_op_desc->MutableOutputDesc(0);
  996. GE_CHECK_NOTNULL(merge_output);
  997. merge_output->SetDataType(output_data_type);
  998. for (uint32_t i = 0; i < src_node->GetAllInDataAnchorsSize(); ++i) {
  999. auto merge_input = src_op_desc->MutableInputDesc(i);
  1000. GE_CHECK_NOTNULL(merge_input);
  1001. merge_input->SetDataType(output_data_type);
  1002. }
  1003. }
  1004. }
  1005. // check if is_output_adjust_hw_layout is set
  1006. if (NeedUpdateFormatByOutputTypeParm(op_desc, index)) {
  1007. if ((old_format != FORMAT_NCHW) && (old_format != FORMAT_NHWC) && (old_format != FORMAT_NC1HWC0)) {
  1008. REPORT_INNER_ERROR("E19999", "Format:%s of op:%s(%s) is not one of NCHW, NHWC, NC1HWC0.",
  1009. TypeUtils::FormatToSerialString(old_format).c_str(),
  1010. op_desc->GetName().c_str(), op_desc->GetType().c_str());
  1011. GELOGE(INTERNAL_ERROR, "[Check][Param] Format is not one of NCHW, NHWC, NC1HWC0.");
  1012. return FAILED;
  1013. }
  1014. GeTensorDesc old_desc(old_shape, old_format, old_dtype);
  1015. if (ProcessNetoutputNodeFp16Nc1hwc0DynShape(old_desc, net_output_input_desc, src_node) != SUCCESS) {
  1016. GELOGE(INTERNAL_ERROR, "[Process][NetOutput] fp16 nc1hwc0 failed.");
  1017. return FAILED;
  1018. }
  1019. }
  1020. }
  1021. return SUCCESS;
  1022. }
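// Parses input shape ranges for dynamic_execute mode. OPTION_EXEC_DYNAMIC_EXECUTE_MODE and
// OPTION_EXEC_DATA_INPUTS_SHAPE_RANGE must be enabled together; if neither is set, nothing is parsed.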
  1023. Status GetDynamicInputShapeRange(const std::vector<GeTensor> &user_input, const std::map<string, string> &graph_option,
  1024. vector<vector<std::pair<int64_t, int64_t>>> &range_vec) {
1025. // Check that both the mode option and the shape_range option are enabled
  1026. auto mode_iter = graph_option.find(OPTION_EXEC_DYNAMIC_EXECUTE_MODE);
  1027. bool enable_dynamic_execute_mode = (mode_iter != graph_option.end()) && (mode_iter->second == "dynamic_execute");
  1028. if (!enable_dynamic_execute_mode) {
  1029. GELOGD("Graph Option: Can not find %s option in graph options or option value is empty",
  1030. OPTION_EXEC_DYNAMIC_EXECUTE_MODE);
  1031. }
  1032. auto iter = graph_option.find(OPTION_EXEC_DATA_INPUTS_SHAPE_RANGE);
  1033. bool enable_input_shape_range = (iter != graph_option.end()) && (!iter->second.empty());
  1034. if (!enable_input_shape_range) {
  1035. GELOGD("Graph Option: Can not find %s option in graph options or option value is empty",
  1036. OPTION_EXEC_DATA_INPUTS_SHAPE_RANGE);
  1037. }
  1038. if (enable_dynamic_execute_mode && enable_input_shape_range) {
  1039. GELOGD("GraphOption: %s value is dynamic_execute, %s value is %s.", OPTION_EXEC_DYNAMIC_EXECUTE_MODE,
  1040. OPTION_EXEC_DATA_INPUTS_SHAPE_RANGE, iter->second.c_str());
  1041. } else if (!enable_dynamic_execute_mode && !enable_input_shape_range) {
  1042. return SUCCESS;
  1043. } else {
  1044. REPORT_INNER_ERROR("E19999", "Graph option: %s and %s should be enabled at the same time, check invalid",
  1045. OPTION_EXEC_DYNAMIC_EXECUTE_MODE, OPTION_EXEC_DATA_INPUTS_SHAPE_RANGE);
  1046. GELOGE(PARAM_INVALID, "[Check][Param] Graph option: %s and %s should be enabled at the same time.",
  1047. OPTION_EXEC_DYNAMIC_EXECUTE_MODE, OPTION_EXEC_DATA_INPUTS_SHAPE_RANGE);
  1048. return PARAM_INVALID;
  1049. }
  1050. if (ParseInputShapeRange(iter->second, range_vec) != SUCCESS) {
  1051. GELOGE(PARAM_INVALID, "[Parse][ShapeRange] Parse dynamic input shape range failed.");
  1052. return PARAM_INVALID;
  1053. }
  1054. if (range_vec.size() != user_input.size()) {
  1055. GELOGE(PARAM_INVALID, "[Check][Param] Dynamic input shape range size is %zu, inputs size is %zu. Not match.",
  1056. range_vec.size(), user_input.size());
  1057. return PARAM_INVALID;
  1058. }
  1059. return SUCCESS;
  1060. }
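// Validates each dim of the Data desc against the user-given range: fixed values must match the dim,
// open ranges mark the dim as UNKNOWN_DIM, and the range is stored on the node's input/output desc.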
  1061. Status UpdateDynamicInputShapeRange(const ge::GeAttrValue::INT index,
  1062. const vector<vector<std::pair<int64_t, int64_t>>> &range_vec, OpDescPtr &op,
  1063. GeTensorDesc &desc) {
  1064. auto origin_shape = desc.GetShape();
  1065. auto current_shape_range_vec = range_vec.at(index);
  1066. if (origin_shape.IsScalar()) {
  1067. GELOGI("Cur input %ld is scalar, no need set shape range.", index);
  1068. return SUCCESS;
  1069. }
  1070. if (current_shape_range_vec.size() != origin_shape.GetDimNum()) {
  1071. REPORT_INNER_ERROR("E19999", "Given shape_range dim num is %zu, current dim:%s num is %zu, not match, "
  1072. "check invalid", current_shape_range_vec.size(), origin_shape.ToString().c_str(),
  1073. origin_shape.GetDimNum());
1074. GELOGE(PARAM_INVALID, "[Check][Param] Given shape_range dim num is %zu, current dim num is %zu, "
1075. "they do not match. Please check.", current_shape_range_vec.size(), origin_shape.GetDimNum());
  1076. return PARAM_INVALID;
  1077. }
  1078. for (size_t i = 0; i < origin_shape.GetDimNum(); ++i) {
  1079. auto curr_dim = origin_shape.GetDim(i);
  1080. auto left_range = current_shape_range_vec.at(i).first;
  1081. auto right_range = current_shape_range_vec.at(i).second;
  1082. if (left_range == right_range) {
1083. // The given shape_range entry is a known dim; check whether it matches the original dim
  1084. if (curr_dim != left_range) {
  1085. REPORT_INNER_ERROR("E19999", "Given shape range is %ld, current dim shape is %ld, not match, dim_index:%zu, "
  1086. "check invalid", left_range, curr_dim, i);
1087. GELOGE(PARAM_INVALID, "[Check][Param] Given shape range is %ld, current dim shape is %ld, "
1088. "they do not match. Please check.", left_range, curr_dim);
  1089. return PARAM_INVALID;
  1090. }
  1091. origin_shape.SetDim(i, left_range);
  1092. } else {
1093. // The given shape_range entry is a fixed range; check whether the input shape falls within it
  1094. if (right_range != UNKNOWN_DIM) {
  1095. if ((curr_dim < left_range) || (curr_dim > right_range)) {
  1096. REPORT_INNER_ERROR("E19999", "Given shape range is [%ld~%ld], current dim shape is %ld, out of range, "
  1097. "dim_index:%zu, check invalid",
  1098. left_range, right_range, curr_dim, i);
1099. GELOGE(PARAM_INVALID, "[Check][Param] Given shape range is [%ld~%ld], current dim shape is %ld, "
1100. "out of range. Please check.", left_range, right_range, curr_dim);
  1101. return PARAM_INVALID;
  1102. }
  1103. }
  1104. origin_shape.SetDim(i, UNKNOWN_DIM);
  1105. }
  1106. }
  1107. desc.SetShape(origin_shape);
  1108. desc.SetShapeRange(current_shape_range_vec);
  1109. graphStatus graph_ret = op->UpdateInputDesc(0, desc);
  1110. GE_CHK_GRAPH_STATUS_RET(graph_ret, "[Update][InputDesc] fail, graph ret: %u", graph_ret);
  1111. graph_ret = op->UpdateOutputDesc(0, desc);
  1112. GE_CHK_GRAPH_STATUS_RET(graph_ret, "[Update][OutputDesc] fail, graph ret: %u", graph_ret);
  1113. return SUCCESS;
  1114. }
  1115. } // namespace
  1116. GraphPrepare::GraphPrepare() : compute_graph_(nullptr) {}
  1117. GraphPrepare::~GraphPrepare() {}
  1118. /**
  1119. * @param graph
  1120. * @return
  1121. */
  1122. Status GraphPrepare::UpdateVariableFormats(ComputeGraphPtr &graph) {
  1123. GE_CHECK_NOTNULL(graph);
  1124. auto var_names_to_refs = CollectVarNamesToRefs(graph);
  1125. for (auto &node : graph->GetAllNodes()) {
  1126. if (node == nullptr) {
  1127. continue;
  1128. }
  1129. if (node->GetType() != VARIABLE) {
  1130. continue;
  1131. }
  1132. auto trans_road = VarManager::Instance(graph->GetSessionID())->GetTransRoad(node->GetName());
  1133. if (trans_road == nullptr) {
  1134. GELOGD("The variable %s does not have any trans road", node->GetName().c_str());
  1135. continue;
  1136. }
  1137. GELOGI("Recover the trans road for var %s reversely", node->GetName().c_str());
  1138. auto ret = RecoverTransRoadForVar(node, *trans_road);
  1139. if (ret != SUCCESS) {
  1140. GELOGE(INTERNAL_ERROR, "[Recover][TransRoad] for var %s failed", node->GetName().c_str());
  1141. return INTERNAL_ERROR;
  1142. }
  1143. auto iter = var_names_to_refs.find(node->GetName());
  1144. if (iter != var_names_to_refs.end()) {
  1145. ret = RecoverTransRoadForVarRef(iter->second, *trans_road);
  1146. if (ret != SUCCESS) {
  1147. GELOGE(INTERNAL_ERROR, "[Recover][TransRoad] for var ref %s failed", node->GetName().c_str());
  1148. return INTERNAL_ERROR;
  1149. }
  1150. }
  1151. }
  1152. return SUCCESS;
  1153. }
  1154. void GraphPrepare::SetOptions(const ge::GraphManagerOptions &options) { options_ = options; }
  1155. Status GraphPrepare::Init(const ge::Graph &graph, uint64_t session_id) {
  1156. compute_graph_ = GraphUtils::GetComputeGraph(graph);
  1157. if (compute_graph_ != nullptr) {
  1158. compute_graph_->SetSessionID(session_id);
  1159. }
  1160. session_id_ = session_id;
  1161. Status ret = CheckGraph();
  1162. if (ret != SUCCESS) {
  1163. GELOGE(ret, "[Check][Graph] fail, ret:%u", ret);
  1164. return ret;
  1165. }
  1166. (void)compute_graph_->TopologicalSorting();
  1167. ret = CheckRefOp();
  1168. if (ret != SUCCESS) {
  1169. GELOGE(ret, "[Check][RefOp] fail, ret:%u", ret);
  1170. return ret;
  1171. }
  1172. return SUCCESS;
  1173. }
  1174. Status GraphPrepare::CheckGraph() {
  1175. if (compute_graph_ == nullptr) {
  1176. REPORT_INNER_ERROR("E19999", "compute_graph_ is nullptr, check invalid");
  1177. GELOGE(GE_GRAPH_INIT_FAILED, "[Check][Param] compute_graph_ is nullptr");
  1178. return GE_GRAPH_INIT_FAILED;
  1179. }
  1180. auto nodes = compute_graph_->GetAllNodes();
  1181. if (nodes.empty()) {
  1182. REPORT_INNER_ERROR("E19999", "nodes in graph is empty, check invalid");
  1183. GELOGE(GE_GRAPH_INIT_FAILED, "[Check][Param] Invalid graph, no nodes in this graph.");
  1184. return GE_GRAPH_INIT_FAILED;
  1185. }
  1186. for (const NodePtr &node : compute_graph_->GetAllNodes()) {
  1187. GE_CHECK_NOTNULL(node);
  1188. if (node->GetOpDesc() == nullptr) {
  1189. REPORT_INNER_ERROR("E19999", "node without opdesc exist in graph, check invalid");
  1190. GELOGE(GE_GRAPH_INIT_FAILED, "[Get][OpDesc] failed, Check Graph node opdesc is NULL");
  1191. return GE_GRAPH_INIT_FAILED;
  1192. }
  1193. }
  1194. return SUCCESS;
  1195. }
  1196. Status GraphPrepare::CheckRefInputNode(const NodePtr &node, const std::string &input_name,
  1197. const std::set<NodePtr> &ref_nodes) {
1198. // Acceptable input types are ref nodes, variables or the Switch operator, which is issued by ME for dynamic
1199. // loss scale and is optimized in SwitchToStreamSwitchPass.
1200. // Since ME does not differentiate between RefSwitch and Switch, it only issues Switch.
  1201. static std::set<std::string> acceptable_types = {ge::VARIABLE, ge::VARIABLEV2, ge::VARHANDLEOP,
  1202. ge::REFSWITCH, ge::REFMERGE, ge::REFENTER,
  1203. ge::REFNEXTITERATION, ge::REFEXIT, ge::SWITCH,
  1204. ge::DATA};
  1205. GE_CHECK_NOTNULL(node);
  1206. const auto &op_desc = node->GetOpDesc();
  1207. GE_CHECK_NOTNULL(op_desc);
  1208. const auto input_index = op_desc->GetInputIndexByName(input_name);
  1209. const auto &in_anchor = node->GetInDataAnchor(input_index);
  1210. GE_CHECK_NOTNULL(in_anchor);
  1211. const auto &peer_out_anchor = in_anchor->GetPeerOutAnchor();
  1212. GE_CHECK_NOTNULL(peer_out_anchor);
  1213. const auto &input_node = peer_out_anchor->GetOwnerNode();
  1214. GE_CHECK_NOTNULL(input_node);
  1215. const auto &input_op_desc = input_node->GetOpDesc();
  1216. GE_CHECK_NOTNULL(input_op_desc);
  1217. bool is_ref = (ref_nodes.find(input_node) != ref_nodes.end());
  1218. if (is_ref) {
  1219. return SUCCESS;
  1220. }
  1221. auto input_type = input_op_desc->GetType();
  1222. if (input_type == ge::FRAMEWORKOP) {
  1223. if (!ge::AttrUtils::GetStr(input_op_desc, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, input_type)) {
  1224. REPORT_INNER_ERROR("E19999", "Get Attr:%s of op:%s(%s) failed",
  1225. ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE.c_str(),
  1226. input_op_desc->GetName().c_str(), input_op_desc->GetType().c_str());
  1227. GELOGE(PARAM_INVALID, "[Get][Attr] %s of op:%s(%s) failed", ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE.c_str(),
  1228. input_op_desc->GetName().c_str(), input_op_desc->GetType().c_str());
  1229. return PARAM_INVALID;
  1230. }
  1231. }
  1232. bool is_acceptable = (acceptable_types.find(input_type) != acceptable_types.end());
  1233. if (!is_acceptable) {
1234. REPORT_INNER_ERROR("E19999", "The ref input of ref node %s[%s] must be a ref node or variable, but %s[%s] isn't.",
1235. node->GetName().c_str(), node->GetType().c_str(), input_op_desc->GetName().c_str(),
1236. input_op_desc->GetType().c_str());
1237. GELOGE(PARAM_INVALID, "[Check][Param] The ref input of ref node %s[%s] must be a ref node or variable, "
1238. "but %s[%s] isn't.", node->GetName().c_str(), node->GetType().c_str(), input_op_desc->GetName().c_str(),
1239. input_op_desc->GetType().c_str());
  1240. return PARAM_INVALID;
  1241. }
  1242. return SUCCESS;
  1243. }
  1244. Status GraphPrepare::CheckRefOp() {
  1245. GE_CHECK_NOTNULL(compute_graph_);
  1246. std::set<NodePtr> ref_nodes;
  1247. for (const NodePtr &node : compute_graph_->GetDirectNode()) {
  1248. if (node == nullptr) {
  1249. REPORT_INNER_ERROR("E19999", "nullptr node exist in graph, check invalid");
  1250. GELOGE(PARAM_INVALID, "[Check][Param] param [node] must not be null.");
  1251. return PARAM_INVALID;
  1252. }
  1253. auto op_desc = node->GetOpDesc();
  1254. if (op_desc == nullptr) {
  1255. REPORT_INNER_ERROR("E19999", "node without opdesc exist in graph, check invalid");
  1256. GELOGE(PARAM_INVALID, "[Check][Param] OpDesc of param [node] must not be null.");
  1257. return PARAM_INVALID;
  1258. }
  1259. auto input_name_index = op_desc->GetAllInputName();
  1260. auto outputs = op_desc->GetAllOutputName();
  1261. for (const auto &name_index : input_name_index) {
  1262. if (op_desc->GetOutputIndexByName(name_index.first) != -1) {
  1263. if (CheckRefInputNode(node, name_index.first, ref_nodes) != SUCCESS) {
  1264. GELOGE(PARAM_INVALID, "[Check][RefInputNode] failed, node:%s.", op_desc->GetName().c_str());
  1265. return PARAM_INVALID;
  1266. }
  1267. (void)ref_nodes.insert(node); // no need to check value
  1268. }
  1269. }
  1270. }
  1271. return SUCCESS;
1272. }
  1273. Status GraphPrepare::SetRtContext(rtContext_t rt_context, rtCtxMode_t mode) {
  1274. GE_CHECK_NOTNULL(compute_graph_);
  1275. GELOGI("set rt_context, session id: %lu, graph id: %u, mode %d, device id:%u.", session_id_,
  1276. compute_graph_->GetGraphID(), static_cast<int>(mode), ge::GetContext().DeviceId());
  1277. GE_CHK_RT_RET(rtCtxCreate(&rt_context, mode, ge::GetContext().DeviceId()));
  1278. GE_CHK_RT_RET(rtCtxSetCurrent(rt_context));
  1279. RtContextUtil::GetInstance().AddRtContext(session_id_, compute_graph_->GetGraphID(), rt_context);
  1280. return SUCCESS;
  1281. }
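// Recalculates the memory size of the Data node's output:0 tensor and writes it back to the op desc;
// unknown-shape outputs are skipped.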
  1282. Status GraphPrepare::AdjustDataOpOutput(const NodePtr &node) {
  1283. if (node == nullptr) {
  1284. REPORT_INNER_ERROR("E19999", "Param node is nullptr, check invalid");
  1285. GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "[Check][Param] Input node is nullptr");
  1286. return GE_GRAPH_GRAPH_NODE_NULL;
  1287. }
  1288. OpDescPtr op_desc_ptr = node->GetOpDesc();
  1289. if (op_desc_ptr == nullptr) {
  1290. REPORT_INNER_ERROR("E19999", "Param node's op_desc is nullptr, check invalid");
  1291. GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "[Get][OpDesc] Input node opdesc is NULL");
  1292. return GE_GRAPH_GRAPH_NODE_NULL;
  1293. }
  1294. GeTensorDesc output = op_desc_ptr->GetOutputDesc(0);
  1295. GeShape output_shape = output.GetShape();
  1296. if (output_shape.IsUnknownShape()) {
  1297. GELOGD("[Adjust][DataOpOutput] Shape of op [%s] output is unknown.", node->GetName().c_str());
  1298. return SUCCESS;
  1299. }
  1300. int64_t tensor_size = 0;
  1301. graphStatus graph_status = TensorUtils::GetTensorMemorySizeInBytes(output, tensor_size);
  1302. if (graph_status != GRAPH_SUCCESS) {
1303. REPORT_CALL_ERROR("E19999", "GetTensorMemorySize by output index:0 of op:%s(%s) failed",
  1304. op_desc_ptr->GetName().c_str(), op_desc_ptr->GetType().c_str());
  1305. GELOGE(graph_status, "[Call][GetTensorMemorySizeInBytes] failed, op:%s", node->GetName().c_str());
  1306. return FAILED;
  1307. }
  1308. TensorUtils::SetSize(output, tensor_size);
  1309. graphStatus graph_ret = op_desc_ptr->UpdateOutputDesc(0, output);
  1310. if (graph_ret != GRAPH_SUCCESS) {
  1311. REPORT_CALL_ERROR("E19999", "Update output desc of op:%s(%s) failed, index:0",
  1312. op_desc_ptr->GetName().c_str(), op_desc_ptr->GetType().c_str());
  1313. GELOGE(graph_ret, "[Update][OutputDesc] of op:%s(%s) failed, index:0",
  1314. op_desc_ptr->GetName().c_str(), op_desc_ptr->GetType().c_str());
  1315. return graph_ret;
  1316. }
  1317. return SUCCESS;
  1318. }
  1319. Status GraphPrepare::CheckInternalFormat(const NodePtr &input_node, const GeTensorDesc &desc) {
  1320. auto format = desc.GetFormat();
  1321. auto origin_format = desc.GetOriginFormat();
  1322. auto tune_flag = (options_.build_mode == BUILD_MODE_TUNING) && (options_.build_step == BUILD_STEP_AFTER_BUILDER);
  1323. bool need_check_internal_format = (!IsTansDataOpData(input_node)) && (!options_.is_single_op) && (!tune_flag);
  1324. if (need_check_internal_format) {
  1325. bool is_internal = TypeUtils::IsInternalFormat(format) || TypeUtils::IsInternalFormat(origin_format);
  1326. if (is_internal) {
  1327. std::string reason = "Input format[" + TypeUtils::FormatToSerialString(format) + "] or origin_format[" +
  1328. TypeUtils::FormatToSerialString(origin_format) + "] of op:" + input_node->GetName() +
  1329. " is not support";
  1330. REPORT_INPUT_ERROR("E19025", std::vector<std::string>({"reason"}), std::vector<std::string>({reason}));
1331. GELOGE(PARAM_INVALID, "[Check][Param] Input format %s or origin_format %s is not supported.",
  1332. TypeUtils::FormatToSerialString(format).c_str(), TypeUtils::FormatToSerialString(origin_format).c_str());
  1333. return FAILED;
  1334. }
  1335. }
  1336. return SUCCESS;
  1337. }
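// Checks the user tensor's data size against its shape size and copies the desc onto the Data node's
// input and output; the copy is skipped in tuning mode after the builder step.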
  1338. Status GraphPrepare::UpdateDataInputOutputDesc(GeAttrValue::INT index, OpDescPtr &op, GeTensorDesc &desc) {
  1339. auto data_type = desc.GetDataType();
  1340. uint32_t length = 1;
  1341. bool type_ret = TypeUtils::GetDataTypeLength(data_type, length);
  1342. if (!type_ret) {
  1343. std::string reason = "Input datatype[" + TypeUtils::DataTypeToSerialString(data_type) + "] of index:" +
1344. std::to_string(index) + " input tensor is not supported";
  1345. REPORT_INPUT_ERROR("E19025", std::vector<std::string>({"reason"}), std::vector<std::string>({reason}));
1346. GELOGE(PARAM_INVALID, "[Check][Param] Input datatype %s is not supported.",
  1347. TypeUtils::DataTypeToSerialString(data_type).c_str());
  1348. return FAILED;
  1349. }
  1350. int64_t desc_shape = desc.GetShape().GetShapeSize();
  1351. FMK_INT64_UINT32_MULCHECK(desc_shape, length);
  1352. int64_t shape_size = desc_shape * length;
  1353. GE_IF_BOOL_EXEC(shape_size == 0 && desc.GetShape().GetDimNum() == 0, shape_size = static_cast<int64_t>(length));
  1354. int64_t size = 0;
  1355. GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(desc, size) != GRAPH_SUCCESS,
  1356. REPORT_CALL_ERROR("E19999", "Get size of user input tensor failed, index:%ld", index);
  1357. GELOGE(INTERNAL_ERROR, "[Get][Size] of user input tensor failed, index:%ld", index); return FAILED);
  1358. bool size_check = (size != 0 && shape_size != size);
  1359. if (size_check) {
  1360. std::string reason = "input tensor[index:" + std::to_string(index) + "]'s data size[" + std::to_string(size) +
  1361. "] != shape_size[" + std::to_string(size) + "], check invalid";
  1362. REPORT_INPUT_ERROR("E19025", std::vector<std::string>({"reason"}), std::vector<std::string>({reason}));
  1363. GELOGE(PARAM_INVALID, "[Check][Param] input data size = %ld, shape_size = %ld.", size, shape_size);
  1364. return FAILED;
  1365. }
  1366. ge::TensorUtils::SetSize(desc, shape_size);
  1367. auto tune_flag = (options_.build_mode == BUILD_MODE_TUNING) && (options_.build_step == BUILD_STEP_AFTER_BUILDER);
  1368. if (!tune_flag) {
  1369. graphStatus graph_ret = op->UpdateInputDesc(0, desc);
  1370. if (graph_ret != GRAPH_SUCCESS) {
  1371. REPORT_CALL_ERROR("E19999", "Update input desc of op:%s(%s) failed, index:0",
  1372. op->GetName().c_str(), op->GetType().c_str());
  1373. GELOGE(graph_ret, "[Update][InputDesc] of op:%s(%s) failed, index:0",
  1374. op->GetName().c_str(), op->GetType().c_str());
  1375. return graph_ret;
  1376. }
  1377. // Size will be recalculated in the build stage
  1378. ge::TensorUtils::SetSize(desc, 0);
  1379. graph_ret = op->UpdateOutputDesc(0, desc);
  1380. if (graph_ret != GRAPH_SUCCESS) {
  1381. REPORT_CALL_ERROR("E19999", "Update output desc of op:%s(%s) failed, index:0",
  1382. op->GetName().c_str(), op->GetType().c_str());
  1383. GELOGE(graph_ret, "[Update][OutputDesc] of op:%s(%s) failed, index:0",
  1384. op->GetName().c_str(), op->GetType().c_str());
  1385. return graph_ret;
  1386. }
  1387. } else {
  1388. GELOGI("data %s skip update info in tune mode", op->GetName().c_str());
  1389. }
  1390. return SUCCESS;
  1391. }
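// Refreshes every Data node's tensor desc from the user-provided inputs, applies shape ranges in
// dynamic_execute mode, and recalculates the output size for inference graphs.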
  1392. Status GraphPrepare::UpdateInput(const std::vector<GeTensor> &user_input,
  1393. const std::map<string, string> &graph_option) {
  1394. // Get shape range of input in dynamic_execute mode
  1395. vector<vector<std::pair<int64_t, int64_t>>> dynamic_shape_range_vec;
  1396. auto ret = GetDynamicInputShapeRange(user_input, graph_option, dynamic_shape_range_vec);
  1397. GE_CHK_STATUS_RET(ret, "[Get][DynamicInputShapeRange] failed, Graph option is not right on Dynamic execute mode.");
  1398. compute_graph_->SaveDataFormat(ge::TypeUtils::DomiFormatToFormat(GetLocalOmgContext().format));
  1399. for (NodePtr &input_node : compute_graph_->GetDirectNode()) {
  1400. GE_CHECK_NOTNULL(input_node);
  1401. OpDescPtr op = input_node->GetOpDesc();
  1402. GE_CHECK_NOTNULL(op);
  1403. if (op->GetType() == DATA) {
  1404. GeAttrValue::INT index = 0;
  1405. if ((!(AttrUtils::GetInt(op, ATTR_NAME_INDEX, index))) || (GetLocalOmgContext().is_dynamic_input)) {
  1406. GELOGW("Get index from data attr failed");
  1407. continue;
  1408. }
  1409. if ((index < 0) || (static_cast<size_t>(index) >= user_input.size())) {
  1410. std::string reason = "exist data op:" + input_node->GetName() + " index " + std::to_string(index) +
  1411. " bigger than input tensor size[" + std::to_string(user_input.size()) + "], check invalid";
  1412. REPORT_INPUT_ERROR("E19025", std::vector<std::string>({"reason"}), std::vector<std::string>({reason}));
  1413. GELOGE(PARAM_INVALID, "[Check][Param] user_input size = %zu, graph data op index = %ld.",
  1414. user_input.size(), index);
  1415. return FAILED;
  1416. }
  1417. if (IsDynamicDims(input_node)) {
  1418. continue;
  1419. }
  1420. GeTensorDesc desc(user_input[index].GetTensorDesc());
1421. // Data may use an internal format (e.g. FRACTAL_NZ) in the single-op process, such as GEMM.
  1422. ret = CheckInternalFormat(input_node, desc);
  1423. if (ret != SUCCESS) {
  1424. GELOGE(INTERNAL_ERROR, "[Check][InternalFormat] on %s failed", op->GetName().c_str());
  1425. return ret;
  1426. }
  1427. ret = UpdateDataInputOutputDesc(index, op, desc);
  1428. if (ret != SUCCESS) {
  1429. GELOGE(FAILED, "[Update][DataInputOutputDesc] on %s failed", op->GetName().c_str());
  1430. return ret;
  1431. }
  1432. if (!dynamic_shape_range_vec.empty()) {
  1433. ret = UpdateDynamicInputShapeRange(index, dynamic_shape_range_vec, op, desc);
  1434. GE_CHK_STATUS_RET(ret, "[Update][DynamicInputShapeRange] on %s failed.", op->GetName().c_str());
  1435. continue;
  1436. }
  1437. if (!options_.train_graph_flag) {
  1438. Status ret = AdjustDataOpOutput(input_node);
  1439. GE_IF_BOOL_EXEC(ret != SUCCESS, GELOGE(ret, "[Adjust][DataOpOutput] fail, ret:%u", ret); return ret);
  1440. }
  1441. }
  1442. }
  1443. return SUCCESS;
  1444. }
  1445. Status GraphPrepare::TryDoAipp() {
1446. // In inference mode with an aipp configure file, insert the aipp ops
  1447. if ((!options_.train_graph_flag) && (!options_.insert_op_file.empty())) {
  1448. GE_DUMP(compute_graph_, "Before_insert_aipp");
  1449. Status ret = ge::InsertNewOpUtil::Instance().Init();
  1450. if (ret != SUCCESS) {
  1451. GELOGE(INTERNAL_ERROR, "[Init][InsertNewOpUtil] failed.");
  1452. return INTERNAL_ERROR;
  1453. }
  1454. ret = ge::InsertNewOpUtil::Instance().Parse(options_.insert_op_file.c_str());
  1455. if (ret != SUCCESS) {
  1456. GELOGE(GE_GRAPH_OPTIMIZE_INSERT_OP_PARSE_FAILED, "[Parse][ConfigFile] %s failed",
  1457. options_.insert_op_file.c_str());
  1458. return GE_GRAPH_OPTIMIZE_INSERT_OP_PARSE_FAILED;
  1459. }
  1460. ret = ge::InsertNewOpUtil::Instance().InsertAippOps(compute_graph_, options_.insert_op_file);
  1461. if (ret != SUCCESS) {
  1462. GELOGE(GE_GRAPH_OPTIMIZE_INSERT_DYN_OP_FAILED, "[Insert][AippOps] failed, ret:%u", ret);
  1463. return GE_GRAPH_OPTIMIZE_INSERT_DYN_OP_FAILED;
  1464. }
  1465. }
  1466. return SUCCESS;
  1467. }
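// Runs origin-format inference, shape inference, then origin-format inference again, bracketed by the
// ResourcePair "add"/"remove" passes.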
  1468. Status GraphPrepare::FormatAndShapeProcess() {
  1469. Status ret = ResourcePairProcess("add");
  1470. if (ret != SUCCESS) {
  1471. GELOGE(ret, "ResourcePairProcess failed");
  1472. return ret;
  1473. }
  1474. GE_TIMESTAMP_START(InferOriginFormat1);
  1475. ret = compute_graph_->InferOriginFormat();
  1476. GE_TIMESTAMP_END(InferOriginFormat1, "GraphPrepare::InferOriginFormat1");
  1477. GE_DUMP(compute_graph_, "after_first_inferformat");
  1478. if (ret != SUCCESS) {
  1479. GELOGE(ret, "[Call][InferOriginFormat] Prepare Graph first inferformat failed");
  1480. return ret;
  1481. }
  1482. GE_TIMESTAMP_START(InferShapeForPreprocess);
  1483. ret = InferShapeForPreprocess();
  1484. GE_TIMESTAMP_END(InferShapeForPreprocess, "GraphPrepare::InferShapeForPreprocess");
  1485. GE_DUMP(compute_graph_, "after_infershape");
  1486. if (ret != SUCCESS) {
  1487. GELOGE(GE_GRAPH_INFERSHAPE_FAILED, "[Call][InferShapeForPreprocess] Prepare Graph infershape failed");
  1488. return GE_GRAPH_INFERSHAPE_FAILED;
  1489. }
  1490. GE_TIMESTAMP_START(InferOriginFormat2);
  1491. ret = compute_graph_->InferOriginFormat();
  1492. GE_TIMESTAMP_END(InferOriginFormat2, "GraphPrepare::InferOriginFormat2");
  1493. if (ret != SUCCESS) {
  1494. GELOGE(ret, "[Call][InferOriginFormat] Prepare Graph inferformat failed");
  1495. return ret;
  1496. }
  1497. ret = ResourcePairProcess("remove");
  1498. if (ret != SUCCESS) {
  1499. return ret;
  1500. }
  1501. return ret;
  1502. }
  1503. Status GraphPrepare::ResourcePairProcess(const std::string &action) {
  1504. PassManager control_pass;
  1505. // Graph pass tmp logic for resource infershape
  1506. if (options_.train_graph_flag) {
  1507. try {
  1508. if (action == "add") {
  1509. (void)control_pass.AddPass("ResourcePairProcess::ResourcePairAddControlPass", new ResourcePairAddControlPass);
  1510. } else {
  1511. (void)control_pass.AddPass("ResourcePairProcess::ResourcePairRemoveControlPass",
  1512. new ResourcePairRemoveControlPass);
  1513. }
  1514. } catch (std::bad_alloc &e) {
  1515. REPORT_INNER_ERROR("E19999", "bad memory allocation occur when add ResourcePair Pass");
  1516. GELOGE(INTERNAL_ERROR, "[Add][Pass] failed, bad memory allocation occur, action:%s.", action.c_str());
  1517. return INTERNAL_ERROR;
  1518. }
  1519. }
  1520. Status ret = control_pass.Run(compute_graph_);
  1521. if (ret != SUCCESS && ret != NOT_CHANGED) {
  1522. GELOGE(ret, "[Run][ResourcePairControlPass] failed, action:%s, ret:%u.", action.c_str(), ret);
  1523. return ret;
  1524. }
  1525. return SUCCESS;
  1526. }
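// Applies user-specified storage format/shape attributes to Data inputs and NetOutput outputs, if present.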
  1527. Status GraphPrepare::UpdateDataNetOutputByStorageFormat() {
  1528. for (auto &node_ptr : compute_graph_->GetAllNodes()) {
  1529. GE_CHECK_NOTNULL(node_ptr);
  1530. if (node_ptr->GetType() == DATA) {
  1531. uint32_t index = 0;
  1532. auto op_desc = node_ptr->GetOpDesc();
  1533. GE_CHECK_NOTNULL(op_desc);
  1534. const GeTensorDescPtr input = op_desc->MutableInputDesc(index);
  1535. Format storage_format = FORMAT_RESERVED;
  1536. vector<int64_t> dst_shape_dims;
  1537. if (GetStorageFormatAndShape(op_desc, input, storage_format, dst_shape_dims) != SUCCESS) {
  1538. GELOGE(INTERNAL_ERROR, "[Get][StorageFormatAndShape] for input failed, op:%s, index:0",
  1539. op_desc->GetName().c_str());
  1540. return FAILED;
  1541. }
  1542. if (storage_format == FORMAT_RESERVED) {
  1543. continue;
  1544. }
  1545. if (ModifyDataNetOutputFormatAndShape(op_desc, index, storage_format, dst_shape_dims) != SUCCESS) {
  1546. GELOGE(INTERNAL_ERROR, "[Modify][DataNetOutputFormatAndShape] for input failed, op:%s, index:0",
  1547. op_desc->GetName().c_str());
  1548. return FAILED;
  1549. }
  1550. }
  1551. if (node_ptr->GetType() == ge::NETOUTPUT) {
  1552. auto op_desc = node_ptr->GetOpDesc();
  1553. GE_CHECK_NOTNULL(op_desc);
  1554. for (uint32_t index = 0; index < op_desc->GetOutputsSize(); index++) {
  1555. const GeTensorDescPtr output = op_desc->MutableOutputDesc(index);
  1556. Format storage_format = FORMAT_RESERVED;
  1557. vector<int64_t> dst_shape_dims;
  1558. if (GetStorageFormatAndShape(op_desc, output, storage_format, dst_shape_dims) != SUCCESS) {
  1559. GELOGE(INTERNAL_ERROR, "[Get][StorageFormatAndShape] from output failed, op:%s, index:%u",
  1560. op_desc->GetName().c_str(), index);
  1561. return FAILED;
  1562. }
  1563. if (storage_format == FORMAT_RESERVED) {
  1564. continue;
  1565. }
  1566. if (ModifyDataNetOutputFormatAndShape(op_desc, index, storage_format, dst_shape_dims) != SUCCESS) {
  1567. GELOGE(INTERNAL_ERROR, "[Modify][DataNetOutputFormatAndShape] for output failed, op:%s, index:%u",
  1568. op_desc->GetName().c_str(), index);
  1569. return FAILED;
  1570. }
  1571. }
  1572. }
  1573. }
  1574. return SUCCESS;
  1575. }
  1576. Status GraphPrepare::SaveOriginalGraphToOmModel() {
  1577. if (options_.save_original_model == "true") {
  1578. ModelHelper model_helper;
  1579. Status ret = model_helper.SaveOriginalGraphToOmModel(ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph_),
  1580. options_.original_model_file);
  1581. if (ret != SUCCESS) {
1582. // If saving the original model fails, processing continues
1583. GELOGW("SaveOriginalGraphToOmModel failed");
  1584. }
  1585. }
  1586. return SUCCESS;
  1587. }
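// PP_RUN_AND_DUMP runs one prepare step via GE_RUN and dumps the graph afterwards; PP_RUN runs the
// step without the dump.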
  1588. #define PP_RUN_AND_DUMP(name, func, ...) \
  1589. do { \
  1590. GE_RUN(Prepare, func, __VA_ARGS__); \
  1591. GE_DUMP(compute_graph, "PrepareAfter" name); \
  1592. GELOGI("Prepare %s on graph %s success.", name, compute_graph->GetName().c_str()); \
  1593. } while (0)
  1594. #define PP_RUN(name, func, ...) \
  1595. do { \
  1596. GE_RUN(Prepare, func, __VA_ARGS__); \
  1597. GELOGI("Prepare %s on graph %s success.", name, compute_graph->GetName().c_str()); \
  1598. } while (0)
  1599. Status GraphPrepare::PrepareDynShape(const GraphNodePtr &graph_node, const std::vector<GeTensor> &user_input,
  1600. ge::ComputeGraphPtr &compute_graph, uint64_t session_id) {
  1601. GE_CHECK_NOTNULL(graph_node->GetGraph());
  1602. GE_CHECK_NOTNULL(compute_graph);
  1603. GetLocalOmgContext().type = static_cast<domi::FrameworkType>(options_.framework_type);
  1604. const Graph &const_graph = *graph_node->GetGraph();
  1605. PP_RUN("Init", Init, const_graph, session_id);
  1606. PP_RUN("SetRtContext", SetRtContext, rtContext_t(), RT_CTX_GEN_MODE);
  1607. PP_RUN_AND_DUMP("CheckAndUpdateInput", CheckAndUpdateInput, user_input, graph_node->GetOptions());
  1608. PP_RUN_AND_DUMP("GraphEquivalentTransformation", GraphEquivalentTransformation);
  1609. PP_RUN_AND_DUMP("ProcessOutput", ProcessNetOutput);
  1610. PP_RUN_AND_DUMP("ProcessMultiBatch", multibatch::ProcessMultiBatch, compute_graph_);
  1611. PP_RUN_AND_DUMP("InsertAipp", TryDoAipp);
  1612. PP_RUN_AND_DUMP("ProcessBeforeInfershape", ProcessBeforeInfershape);
  1613. PP_RUN_AND_DUMP("InferFormatAndShape", FormatAndShapeProcess);
  1614. PP_RUN_AND_DUMP("CtrlFlowPreProcess", CtrlFlowPreProcess);
  1615. PP_RUN_AND_DUMP("GetDynamicOutputShape", multibatch::GetDynamicOutputShape, compute_graph_);
  1616. PP_RUN_AND_DUMP("ProcessAippStage2", InsertNewOpUtil::Instance().UpdateDataNodeByAipp, compute_graph_);
  1617. PP_RUN("SaveOriginalGraphToOmModel", SaveOriginalGraphToOmModel);
  1618. PP_RUN_AND_DUMP("PrepareOptimize", PrepareOptimize);
  1619. return SUCCESS;
  1620. }
  1621. Status GraphPrepare::CtrlFlowPreProcess() {
  1622. PassManager graph_pass;
  1623. // After InferShape Mark v1 control flow for unknown shape.
  1624. GE_CHK_STATUS_RET(graph_pass.AddPass("PreRun::MarkForceUnknownForCondPass",
  1625. new (std::nothrow) MarkForceUnknownForCondPass));
  1626. GE_CHK_STATUS_RET(graph_pass.Run(compute_graph_));
  1627. return SUCCESS;
  1628. }
  1629. Status GraphPrepare::RecordAIPPInfo(ge::ComputeGraphPtr &compute_graph) {
  1630. PP_RUN("RecordAIPPInfo", InsertNewOpUtil::Instance().RecordAIPPInfoToData, compute_graph_);
  1631. return SUCCESS;
  1632. }
  1633. Status GraphPrepare::PrepareRunningFormatRefiner() {
  1634. auto compute_graph = compute_graph_;
  1635. PassManager pass_manager;
  1636. GE_CHK_STATUS_RET(pass_manager.AddPass("PrepareRunningFormatRefiner::VariablePrepareOpPass",
  1637. new (std::nothrow) VariablePrepareOpPass))
  1638. GE_TIMESTAMP_START(pass_manager);
  1639. auto ret = pass_manager.Run(compute_graph);
  1640. GE_TIMESTAMP_END(pass_manager, "GraphPrepare::PrepareRunningFormatRefiner");
  1641. if (ret != SUCCESS && ret != NOT_CHANGED) {
  1642. GELOGE(ret, "[Run][Passes] for running format refiner failed, ret:%u.", ret);
  1643. return ret;
  1644. }
  1645. PP_RUN_AND_DUMP("UpdateInputOutputByUserOptions", UpdateInputOutputByOptions);
  1646. PP_RUN_AND_DUMP("UpdateVariableFormats", UpdateVariableFormats, compute_graph_);
  1647. return SUCCESS;
  1648. }
  1649. Status GraphPrepare::SwitchOpOptimize(ComputeGraphPtr &compute_graph) {
  1650. if (compute_graph == nullptr) {
  1651. REPORT_INNER_ERROR("E19999", "Param compute_graph is nullptr, check invalid");
  1652. GELOGE(GE_GRAPH_NULL_INPUT, "[Check][Param] Input Graph is NULL");
  1653. return GE_GRAPH_NULL_INPUT;
  1654. }
  1655. GEPass ge_passes(compute_graph);
  1656. NamesToPass hccl_group;
  1657. HcclGroupPass hccl_group_pass;
  1658. GELOGD("Add hccl group pass success");
  1659. hccl_group.emplace_back("HcclGroupPass", &hccl_group_pass);
  1660. auto ret = ge_passes.Run(hccl_group);
  1661. if (ret != SUCCESS) {
  1662. GELOGE(ret, "[Run][HcclGroupPass] pass for preprocess failed, ret:%u.", ret);
  1663. return ret;
  1664. }
  1665. ret = compute_graph->TopologicalSorting();
  1666. if (ret != SUCCESS) {
  1667. REPORT_CALL_ERROR("E19999", "Topological sorting failed");
  1668. GELOGE(ret, "[Call][TopologicalSorting] Graph topological sort failed, ret:%u.", ret);
  1669. return ret;
  1670. }
  1671. return SUCCESS;
  1672. }
  1673. #undef PP_RUN_AND_DUMP
  1674. #undef PP_RUN
  1675. Status GraphPrepare::GenerateInfershapeGraph(ConstGraphPtr graph) {
  1676. if (graph == nullptr) {
  1677. REPORT_INNER_ERROR("E19999", "Param graph is nullptr, check invalid");
  1678. GELOGE(GE_GRAPH_NULL_INPUT, "[Check][Param] Input Graph is NULL");
  1679. return GE_GRAPH_NULL_INPUT;
  1680. }
  1681. const Graph &const_graph = *graph;
  1682. Status ret = Init(const_graph, 0);
  1683. if (ret != SUCCESS) {
  1684. GELOGE(ret, "[Init][GraphPrepare] fail, ret:%u", ret);
  1685. return ret;
  1686. }
  1687. GE_DUMP(compute_graph_, "after_parser");
  1688. GELOGI("Start infershape for dump json process.");
  1689. ret = compute_graph_->InferOriginFormat();
  1690. GE_DUMP(compute_graph_, "after_inferformat");
  1691. if (ret != SUCCESS) {
  1692. REPORT_CALL_ERROR("E19999", "Infer OriginFormat failed");
  1693. GELOGE(ret, "[Infer][OriginFormat] failed");
  1694. return ret;
  1695. }
  1696. InferShapePass infer_shape_pass;
  1697. NamesToPass names_to_passes;
  1698. names_to_passes.emplace_back("InferShapePass", &infer_shape_pass);
  1699. GEPass ge_passes(compute_graph_);
  1700. ret = ge_passes.Run(names_to_passes);
  1701. GE_DUMP(compute_graph_, "after_infershape");
  1702. if (ret != SUCCESS) {
  1703. GELOGE(ret, "[Run][GePasses] infershape for preprocess failed, ret:%u.", ret);
  1704. return ret;
  1705. }
  1706. ShapeRefiner::ClearContextMap();
  1707. return SUCCESS;
  1708. }
  1709. Status GraphPrepare::CheckConstOp() {
  1710. for (auto &node_ptr : compute_graph_->GetAllNodes()) {
  1711. GE_CHECK_NOTNULL(node_ptr);
  1712. if (node_ptr->GetType() == CONSTANT) {
  1713. Status ret = VerifyConstOp(node_ptr);
  1714. GE_CHK_BOOL_RET_STATUS(ret == SUCCESS, ret, "Const Op Check failed");
  1715. } else if (node_ptr->GetType() == FRAMEWORKOP) {
  1716. auto op_desc = node_ptr->GetOpDesc();
  1717. if (op_desc == nullptr) {
  1718. REPORT_INNER_ERROR("E19999", "op_desc is nullptr, check invalid");
  1719. GELOGE(PARAM_INVALID, "[Get][OpDesc] of node failed, op_desc is nullptr, node type:FRAMEWORKOP.");
  1720. return PARAM_INVALID;
  1721. }
  1722. std::string original_type;
  1723. GE_IF_BOOL_EXEC(ge::AttrUtils::GetStr(op_desc, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, original_type),
  1724. GELOGI("Get FrameWorkOp original type [%s]", original_type.c_str()));
  1725. GELOGI("original type is %s", original_type.c_str());
  1726. if (original_type == CONSTANT) {
  1727. Status ret = VerifyConstOp(node_ptr);
  1728. GE_CHK_BOOL_RET_STATUS(ret == SUCCESS, ret, "Const Op Check failed");
  1729. }
  1730. }
  1731. }
  1732. return SUCCESS;
  1733. }
  1734. Status GraphPrepare::VerifyConstOp(const NodePtr &node) {
  1735. GE_CHECK_NOTNULL(node);
  1736. auto op_desc = node->GetOpDesc();
  1737. GE_CHECK_NOTNULL(op_desc);
  1738. ConstGeTensorPtr ge_tensor_ptr;
  1739. if (!(AttrUtils::GetTensor(op_desc, ATTR_NAME_WEIGHTS, ge_tensor_ptr))) {
  1740. REPORT_INNER_ERROR("E19999", "Get Attr:%s of op:%s(%s) failed", ATTR_NAME_WEIGHTS.c_str(),
  1741. op_desc->GetName().c_str(), op_desc->GetType().c_str());
  1742. GELOGE(PARAM_INVALID, "[Get][Attr] %s of op:%s(%s) failed", ATTR_NAME_WEIGHTS.c_str(),
  1743. op_desc->GetName().c_str(), op_desc->GetType().c_str());
  1744. return PARAM_INVALID;
  1745. }
  1746. GE_CHECK_NOTNULL(ge_tensor_ptr);
  1747. auto data_size = ge_tensor_ptr->GetData().GetSize();
  1748. auto ge_tensor_desc = ge_tensor_ptr->GetTensorDesc();
  1749. int64_t shape_size = ge_tensor_desc.GetShape().GetShapeSize();
  1750. auto data_type = ge_tensor_desc.GetDataType();
  1751. if (data_type == DT_STRING) {
  1752. return SUCCESS;
  1753. }
  1754. uint32_t length = 1;
  1755. bool type_ret = TypeUtils::GetDataTypeLength(data_type, length);
  1756. if (!type_ret) {
1757. REPORT_INNER_ERROR("E19999", "const node:%s's input datatype:%s is not supported",
  1758. node->GetName().c_str(), TypeUtils::DataTypeToSerialString(data_type).c_str());
1759. GELOGE(PARAM_INVALID, "[Check][Param] Input datatype %s is not supported.",
  1760. TypeUtils::DataTypeToSerialString(data_type).c_str());
  1761. return FAILED;
  1762. }
  1763. FMK_INT64_UINT32_MULCHECK(shape_size, length);
  1764. GELOGI("Const real value Size:%zu, op_desc Shape Size:%ld, data_type:%s.", data_size, shape_size * length,
  1765. TypeUtils::DataTypeToSerialString(data_type).c_str());
  1766. if (shape_size == 0) {
  1767. if (ge_tensor_desc.GetShape().GetDims().size() == 0) {
1768. // shape = [] means it's a scalar tensor.
  1769. GE_CHK_BOOL_EXEC(data_size / length == 1,
  1770. REPORT_INNER_ERROR("E19999", "Const Node:%s is invalid, data size:%zu not equal to tensor size:%u",
  1771. node->GetName().c_str(), data_size, length);
1772. return PARAM_INVALID, "[Check][Param] Const is an invalid scalar tensor.");
  1773. } else {
1774. // shape = [x, y, 0, ...] means it's a vector tensor whose value is [].
  1775. GE_CHK_BOOL_EXEC(data_size == 0,
  1776. REPORT_INNER_ERROR("E19999", "Const Node:%s is invalid, data size:%zu not equal to tensor size:0",
  1777. node->GetName().c_str(), data_size);
1778. return PARAM_INVALID, "[Check][Param] Const is an invalid vector tensor.");
  1779. }
  1780. } else {
  1781. GE_CHK_BOOL_EXEC(
  1782. data_size == static_cast<size_t>(shape_size * length) && data_size != 0,
  1783. REPORT_INNER_ERROR("E19999", "Const Node:%s is invalid, data size:%zu not equal to tensor size:%ld",
  1784. node->GetName().c_str(), data_size, shape_size * length);
  1785. return PARAM_INVALID, "[Check][Param] Const input data size is not equal with tensor desc shape");
  1786. }
  1787. return SUCCESS;
  1788. }
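// Returns true when the Data node still has negative dims and the user configured dynamic dims
// (input_shape + dynamic_dims + dynamic_node_type), meaning its desc should not be overwritten here.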
  1789. bool GraphPrepare::IsDynamicDims(const NodePtr &input_node) {
  1790. auto data_shape = NodeUtils::GetOutputDesc(*input_node, kDataOutIndex).GetShape();
  1791. const auto &dims = data_shape.GetDims();
  1792. bool all_is_positive = false;
  1793. if (std::all_of(dims.begin(), dims.end(), [](int64_t val) { return val >= 0; })) {
  1794. all_is_positive = true;
  1795. }
  1796. if (!all_is_positive && !options_.input_shape.empty() && !options_.dynamic_dims.empty() &&
  1797. options_.dynamic_node_type != kInvalidDynaimcDimsType) {
  1798. GELOGI("No need to check and update desc info, the dims of %s is %s.", input_node->GetName().c_str(),
  1799. formats::JoinToString(dims).c_str());
  1800. return true;
  1801. }
  1802. return false;
  1803. }
  1804. Status GraphPrepare::CheckUserInput(const std::vector<GeTensor> &user_input) {
  1805. if (GetLocalOmgContext().is_dynamic_input) {
  1806. return SUCCESS;
  1807. }
  1808. unsigned int node_num = 0;
  1809. unsigned int data_num = 0;
  1810. for (NodePtr &input_node : compute_graph_->GetDirectNode()) {
  1811. GE_CHECK_NOTNULL(input_node);
  1812. OpDescPtr op = input_node->GetOpDesc();
  1813. GE_CHECK_NOTNULL(op);
  1814. node_num++;
  1815. if (op->GetType() == DATA || op->GetType() == AIPPDATA) {
  1816. data_num++;
  1817. GeAttrValue::INT index = 0;
  1818. if (!(AttrUtils::GetInt(op, ATTR_NAME_INDEX, index))) {
1819. REPORT_INNER_ERROR("E19999", "Get Attr:%s of op:%s(%s) failed", ATTR_NAME_INDEX.c_str(),
1820. op->GetName().c_str(), op->GetType().c_str());
1821. GELOGE(GE_GRAPH_INIT_FAILED, "[Get][Attr] %s of op:%s(%s) failed", ATTR_NAME_INDEX.c_str(),
1822. op->GetName().c_str(), op->GetType().c_str());
  1823. return GE_GRAPH_INIT_FAILED;
  1824. }
  1825. if ((index < 0) || (static_cast<size_t>(index) >= user_input.size())) {
  1826. std::string reason = "exist data op:" + input_node->GetName() + " index " + std::to_string(index) +
  1827. " bigger than input tensor size[" + std::to_string(user_input.size()) + "], check invalid";
  1828. REPORT_INPUT_ERROR("E19025", std::vector<std::string>({"reason"}), std::vector<std::string>({reason}));
  1829. GELOGE(GE_GRAPH_INIT_FAILED, "[Check][Param] user_input size:%zu must larger than data op index:%ld.",
  1830. user_input.size(), index);
  1831. return GE_GRAPH_INIT_FAILED;
  1832. }
  1833. if (IsDynamicDims(input_node)) {
  1834. continue;
  1835. }
  1836. GeTensorDesc desc(user_input[index].GetTensorDesc());
  1837. for (size_t i = 0; i < desc.GetShape().GetDimNum(); ++i) {
  1838. int64_t dim = desc.GetShape().GetDim(i);
  1839. if (dim < UNKNOWN_DIM_NUM) {
  1840. std::string reason = "data dim[" + std::to_string(i) + "][" + std::to_string(dim) + "] of index:" +
  1841. std::to_string(index) + " input tensor it need >= -2";
  1842. REPORT_INPUT_ERROR(
  1843. "E19025", std::vector<std::string>({"reason"}), std::vector<std::string>({reason}));
  1844. GELOGE(GE_GRAPH_INIT_FAILED, "[Check][InputDim]data dim %zu is not supported, need >= -2, real:%ld.", i, dim);
  1845. return GE_GRAPH_INIT_FAILED;
  1846. }
  1847. }
  1848. }
  1849. }
  1850. if (node_num <= data_num) {
  1851. GELOGW("Prepare check user input, data_num = %u, node_num = %u", data_num, node_num);
  1852. }
  1853. return SUCCESS;
  1854. }
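// Seeds dummy shapes for v1 control-flow ops (Merge/While) and then runs the infershape-related passes;
// AicpuConstantFoldingPass is added only when AICPU_CONSTANT_FOLDING_ON is set and a device is available.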
  1855. Status GraphPrepare::InferShapeForPreprocess() {
  1856. GELOGI("Start infershape for preprocess.");
  1857. // Prepare dummy_shape for v1 control_flow op before infershape
  1858. for (const auto &node : compute_graph_->GetAllNodes()) {
  1859. string type;
  1860. GetOriginalType(node, type);
  1861. if (type == MERGE || type == REFMERGE) {
  1862. for (size_t i = 0; i < node->GetAllInDataAnchorsSize(); ++i) {
  1863. GELOGD("Prepare for infershape: update %s input_shape as dummy.", node->GetName().c_str());
  1864. NodeUtils::UpdateInputShape(*node, i, GeShape(DUMMY_SHAPE));
  1865. }
  1866. } else if (type == WHILE) {
  1867. for (size_t i = 0; i < node->GetAllInDataAnchorsSize(); ++i) {
  1868. GELOGD("Prepare for infershape: update %s output_shape as dummy.", node->GetName().c_str());
  1869. NodeUtils::UpdateOutputShape(*node, i, GeShape(DUMMY_SHAPE));
  1870. }
  1871. }
  1872. }
  1873. GEPass ge_passes(compute_graph_);
  1874. NamesToPass names_to_passes;
  1875. AssertPass assert_pass;
  1876. if (!options_.train_graph_flag) {
  1877. names_to_passes.emplace_back("AssertPass", &assert_pass);
  1878. }
  1879. SwitchDeadBranchElimination switch_dead_branch_elimination;
  1880. names_to_passes.emplace_back("SwitchDeadBranchElimination", &switch_dead_branch_elimination);
  1881. MergePass merge_pass;
  1882. names_to_passes.emplace_back("MergePass", &merge_pass);
  1883. InferShapePass infer_shape_pass;
  1884. names_to_passes.emplace_back("InferShapePass", &infer_shape_pass);
  1885. ReplaceWithEmptyConstPass replace_with_empty_const_pass;
  1886. names_to_passes.emplace_back("ReplaceWithEmptyConstPass", &replace_with_empty_const_pass);
  1887. DimensionComputePass dimension_compute_pass;
  1888. names_to_passes.emplace_back("DimensionComputePass", &dimension_compute_pass);
  1889. ConstantFoldingPass constant_folding_pass;
  1890. names_to_passes.emplace_back("ConstantFoldingPass", &constant_folding_pass);
  1891. InferValueRangePass infer_value_pass;
  1892. names_to_passes.emplace_back("InferValuePass", &infer_value_pass);
  1893. int32_t dev_count = 0;
  1894. AicpuConstantFoldingPass aicpu_constant_folding_pass;
  1895. const char *aicpu_constant_folding_on = std::getenv("AICPU_CONSTANT_FOLDING_ON");
  1896. rtError_t rt_err = RT_ERROR_NONE;
  1897. if (aicpu_constant_folding_on != nullptr) {
  1898. rt_err = rtGetDeviceCount(&dev_count);
  1899. if (rt_err == RT_ERROR_NONE) {
  1900. Status result = SetRtContext(rtContext_t(), RT_CTX_NORMAL_MODE);
  1901. if (result != SUCCESS) {
  1902. GELOGE(result, "[Set][RtContext] failed, mode = RT_CTX_NORMAL_MODE.");
  1903. return result;
  1904. }
  1905. names_to_passes.emplace_back("AicpuConstantFoldingPass", &aicpu_constant_folding_pass);
  1906. }
  1907. }
  1908. Status ret = ge_passes.Run(names_to_passes);
  1909. if (aicpu_constant_folding_on != nullptr) {
  1910. if (rt_err == RT_ERROR_NONE) {
  1911. Status result = SetRtContext(rtContext_t(), RT_CTX_GEN_MODE);
  1912. if (result != SUCCESS) {
  1913. GELOGE(result, "[Set][RtContext] failed, mode = RT_CTX_GEN_MODE.");
  1914. return result;
  1915. }
  1916. }
  1917. }
  1918. ShapeRefiner::ClearContextMap();
  1919. if (ret != SUCCESS) {
  1920. GELOGE(ret, "[Run][GePasses] infershape for preprocess failed, ret:%u.", ret);
  1921. return ret;
  1922. }
  1923. return SUCCESS;
  1924. }
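// Preprocess optimization: RW-conflict check, original graph passes, per-node passes, prune/HcclMemcpy
// graph passes and constant type conversion, followed by a topological sort.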
  1925. Status GraphPrepare::PrepareOptimize() {
  1926. GELOGI("Start optimize for preprocess.");
  1927. // check rw type
  1928. GraphOptimize graph_optimize;
  1929. bool has_conflict = false;
  1930. graph_optimize.CheckRWConflict(compute_graph_, has_conflict);
  1931. if (has_conflict) {
1932. GELOGE(GRAPH_PARAM_INVALID, "[Check][RWConflict] There is a rw conflict. Stop optimizing.");
  1933. return FAILED;
  1934. }
  1935. PassManager original_graph_passes;
  1936. // Graph pass
  1937. try {
  1938. (void)original_graph_passes.AddPass("PrepareOptimize::ShapeOperateOpRemovePass", new ShapeOperateOpRemovePass);
  1939. (void)original_graph_passes.AddPass("PrepareOptimize::ReplaceTransShapePass", new ReplaceTransShapePass);
  1940. (void)original_graph_passes.AddPass("PrepareOptimize::MarkAgnosticPass", new MarkAgnosticPass);
  1941. } catch (std::bad_alloc &e) {
  1942. REPORT_INNER_ERROR("E19999", "bad memory allocation occur when add Pass");
  1943. GELOGE(INTERNAL_ERROR, "[Add][Pass] failed, bad memory allocation occurs.");
  1944. return INTERNAL_ERROR;
  1945. }
  1946. GE_TIMESTAMP_START(original_graph_passes);
  1947. Status ret = original_graph_passes.Run(compute_graph_);
  1948. GE_TIMESTAMP_END(original_graph_passes, "GraphPrepare::OriginalGraphPasses");
  1949. if (ret != SUCCESS && ret != NOT_CHANGED) {
  1950. GELOGE(ret, "[Run][GraphPasses] optimize for preprocess failed, ret:%u.", ret);
  1951. return ret;
  1952. }
  1953. // New pass
  1954. GEPass ge_passes(compute_graph_);
  1955. NamesToPass names_to_passes;
  1956. EnterPass enter_pass;
  1957. names_to_passes.emplace_back("EnterPass", &enter_pass);
  1958. CondPass cond_pass;
  1959. names_to_passes.emplace_back("CondPass", &cond_pass);
  1960. PrintOpPass print_pass;
  1961. if (options_.enable_print_op_pass) {
  1962. names_to_passes.emplace_back("PrintOpPass", &print_pass);
  1963. }
  1964. NoUseReshapeRemovePass no_use_reshape_remove_pass;
  1965. names_to_passes.emplace_back("NoUseReshapeRemovePass", &no_use_reshape_remove_pass);
  1966. DropOutPass dropout_pass;
  1967. AssertPass assert_pass;
  1968. UnusedConstPass unused_const_pass;
  1969. StopGradientPass stop_gradient_pass;
  1970. PreventGradientPass prevent_gradient_pass;
  1971. PlaceholderWithDefaultPass placeholder_with_default_pass;
  1972. GuaranteeConstPass guarantee_const_pass;
  1973. VarIsInitializedOpPass var_is_initialized_pass;
  1974. ParallelConcatStartOpPass parallel_concat_start_op_pass;
  1975. IdentityPass identity_pass(false);
  1976. SnapshotPass snapshot_pass;
  1977. if (!options_.train_graph_flag) {
  1978. names_to_passes.emplace_back("DropOutPass", &dropout_pass);
  1979. names_to_passes.emplace_back("AssertPass", &assert_pass);
  1980. }
  1981. names_to_passes.emplace_back("UnusedConstPass", &unused_const_pass);
  1982. names_to_passes.emplace_back("StopGradientPass", &stop_gradient_pass);
  1983. names_to_passes.emplace_back("PreventGradientPass", &prevent_gradient_pass);
  1984. names_to_passes.emplace_back("PlaceholderWithDefaultPass", &placeholder_with_default_pass);
  1985. names_to_passes.emplace_back("SnapshotPass", &snapshot_pass);
  1986. names_to_passes.emplace_back("GuaranteeConstPass", &guarantee_const_pass);
  1987. names_to_passes.emplace_back("VarIsInitializedOpPass", &var_is_initialized_pass);
  1988. names_to_passes.emplace_back("ParallelConcatStartOpPass", &parallel_concat_start_op_pass);
  1989. names_to_passes.emplace_back("IdentityPass", &identity_pass);
  1990. GE_TIMESTAMP_START(names_to_passes);
  1991. ret = ge_passes.Run(names_to_passes);
  1992. GE_TIMESTAMP_END(names_to_passes, "GraphPrepare::NamesToPasses");
  1993. if (ret != SUCCESS) {
  1994. GELOGE(ret, "[Run][GePasses] optimize for preprocess failed, ret:%u.", ret);
  1995. return ret;
  1996. }
  1997. PassManager graph_pass;
  1998. try {
  1999. (void)graph_pass.AddPass("PrepareOptimize::PrunePass", new PrunePass);
  2000. // can't move to optimize1/2 directly, may cause more identity insert, cause CI fail
  2001. (void)graph_pass.AddPass("PrepareOptimize::HcclMemcpyPass", new HcclMemcpyPass);
  2002. } catch (std::bad_alloc &e) {
  2003. REPORT_INNER_ERROR("E19999", "bad memory allocation occur when add Pass");
  2004. GELOGE(INTERNAL_ERROR, "[Add][Pass] failed, bad memory allocation occurs.");
  2005. return INTERNAL_ERROR;
  2006. }
  2007. GE_TIMESTAMP_START(graph_passes);
  2008. ret = graph_pass.Run(compute_graph_);
  2009. GE_TIMESTAMP_END(graph_passes, "GraphPrepare::GraphPasses");
  2010. if (ret != SUCCESS && ret != NOT_CHANGED) {
  2011. GELOGE(ret, "[Run][GraphPasses] optimize for preprocess failed, ret:%u.", ret);
  2012. return ret;
  2013. }
  2014. // The constant for train is CONSTANTOP, and is CONSTANT for inference. They will be unified in future.
  2015. TypeConversionOfConstant();
  2016. ret = compute_graph_->TopologicalSorting();
  2017. if (ret != SUCCESS) {
  2018. REPORT_CALL_ERROR("E19999", "Topological sorting failed");
  2019. GELOGE(ret, "[Call][TopologicalSorting] Graph topological sort failed, ret:%u.", ret);
  2020. return ret;
  2021. }
  2022. GELOGI("End optimize for preprocess.");
  2023. return SUCCESS;
  2024. }
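
// Unifies the constant node type for the current build mode: training graphs use CONSTANTOP,
// inference graphs use CONSTANT. The conversion is skipped entirely when any node carries
// ATTR_SINGLE_OP_SCENE, i.e. when the graph comes from aclOpCompile.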
void GraphPrepare::TypeConversionOfConstant() {
  bool is_acl_compile = false;
  for (ge::NodePtr &n : compute_graph_->GetAllNodes()) {
    // GetAllNodes() ensures that n is not a null pointer.
    // No conversion when called by aclOpCompile.
    (void)AttrUtils::GetBool(n->GetOpDesc(), ATTR_SINGLE_OP_SCENE, is_acl_compile);
    if (is_acl_compile) {
      return;
    }
  }
  if (options_.train_graph_flag) {
    GELOGD("Transform CONSTANT to CONSTANTOP in training.");
    for (ge::NodePtr &n : compute_graph_->GetAllNodes()) {
      // GetAllNodes() ensures that n is not a null pointer.
      if (n->GetOpDesc()->GetType() == CONSTANT) {
        n->GetOpDesc()->SetType(CONSTANTOP);
      }
    }
  } else {
    GELOGD("Transform CONSTANTOP to CONSTANT in inference.");
    for (ge::NodePtr &n : compute_graph_->GetAllNodes()) {
      // GetAllNodes() ensures that n is not a null pointer.
      if (n->GetOpDesc()->GetType() == CONSTANTOP) {
        n->GetOpDesc()->SetType(CONSTANT);
      }
    }
  }
}
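
// Equivalent graph rewriting before preparation: runs ForPass (registered here under the
// name "ForToWhilePass", which suggests For nodes are lowered to While constructs) through
// the node-level GEPass framework.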
Status GraphPrepare::GraphEquivalentTransformation() {
  NamesToPass names_to_pass;
  ForPass for_pass;
  names_to_pass.emplace_back("ForToWhilePass", &for_pass);
  return GEPass(compute_graph_).Run(names_to_pass);
}
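
// Runs CondRemovePass through GEPass ahead of shape inference, with timestamps recorded
// around the pass execution.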
Status GraphPrepare::ProcessBeforeInfershape() {
  NamesToPass names_to_passes;
  CondRemovePass condition_remove_pass;
  names_to_passes.emplace_back("CondRemovePass", &condition_remove_pass);
  GE_TIMESTAMP_START(ProcessCondRemove);
  auto ret = GEPass(compute_graph_).Run(names_to_passes);
  GE_TIMESTAMP_END(ProcessCondRemove, "GraphManager::ProcessCondRemove");
  if (ret != SUCCESS) {
    GELOGE(ret, "[Run][GEPass] CondRemovePass before infershape failed, ret:%d.", ret);
    return ret;
  }
  return SUCCESS;
}
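
// Builds the graph's output structure before shape inference: SavePass is added only for
// training graphs, followed by NetOutputPass and DataPass (NetOutput is added first, as noted
// below). A NOT_CHANGED result from the pass manager is treated as success.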
Status GraphPrepare::ProcessNetOutput() {
  PassManager graph_passes_before_infershape;
  try {
    if (options_.train_graph_flag) {
      graph_passes_before_infershape.AddPass("ProcessNetOutput::SavePass", new (std::nothrow) SavePass);
    }
    graph_passes_before_infershape.AddPass("ProcessNetOutput::NetOutputPass", new (std::nothrow) NetOutputPass);
    graph_passes_before_infershape.AddPass("ProcessNetOutput::DataPass",
                                           new (std::nothrow) DataPass);  // Add NetOutput first.
  } catch (const std::bad_alloc &) {
    REPORT_INNER_ERROR("E19999", "Bad memory allocation occurred when adding pass");
    GELOGE(INTERNAL_ERROR, "Add pass failed, bad memory allocation occurs.");
    return INTERNAL_ERROR;
  }
  auto ret = graph_passes_before_infershape.Run(compute_graph_);
  if ((ret != SUCCESS) && (ret != NOT_CHANGED)) {
    GELOGE(ret, "[Run][GraphPasses] before Infershape failed, ret:%d.", ret);
    return ret;
  }
  return SUCCESS;
}
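
// Records the number of user inputs on the graph and, when inputs are provided, validates them
// (CheckUserInput), applies them to the graph (UpdateInput) and checks the resulting const ops
// (CheckConstOp); with no user inputs the function returns right after SetInputSize.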
Status GraphPrepare::CheckAndUpdateInput(const std::vector<GeTensor> &user_input,
                                         const std::map<string, string> &graph_option) {
  compute_graph_->SetInputSize(user_input.size());
  if (user_input.empty()) {
    return SUCCESS;
  }
  auto ret = CheckUserInput(user_input);
  if (ret != SUCCESS) {
    GELOGE(ret, "[Check][UserInput] failed, ret:%u", ret);
    return ret;
  }
  ret = UpdateInput(user_input, graph_option);
  if (ret != SUCCESS) {
    GELOGE(ret, "[Update][Input] failed, ret:%u", ret);
    return ret;
  }
  if (user_input.size() != 0) {
    ret = CheckConstOp();
    if (ret != SUCCESS) {
      GELOGE(ret, "[Check][ConstOp] failed, ret:%u", ret);
      return ret;
    }
  } else {
    ret = compute_graph_->TopologicalSorting();
    if (ret != SUCCESS) {
      REPORT_CALL_ERROR("E19999", "Topological sorting failed");
      GELOGE(ret, "[Call][TopologicalSorting] failed.");
      return FAILED;
    }
  }
  return SUCCESS;
}
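
// Applies option-driven updates to the graph boundary: storage-format updates for Data/NetOutput
// nodes first, then (inference only) per-node handling of ND format, dynamic-shape Data nodes and
// the NETOUTPUT node. Training graphs return right after the storage-format update.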
Status GraphPrepare::UpdateInputOutputByOptions() {
  auto ret = UpdateDataNetOutputByStorageFormat();
  if (ret != SUCCESS) {
    GELOGE(ret, "[Update][DataNetOutputByStorageFormat] failed.");
    return ret;
  }
  if (options_.train_graph_flag) {
    GELOGI("This is train mode, no need to do this processing.");
    return SUCCESS;
  }
  for (auto &node_ptr : compute_graph_->GetDirectNode()) {
    GE_CHECK_NOTNULL(node_ptr);
    if (CheckIfNeedSetNdFormat(node_ptr) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "[Set][NdFormat] for node:%s failed", node_ptr->GetName().c_str());
      return FAILED;
    }
    if (node_ptr->GetType() == DATA) {
      if (ProcessDataNodeDynShape(node_ptr) != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "[Call][ProcessDataNodeDynShape] for node:%s failed", node_ptr->GetName().c_str());
        return FAILED;
      }
    }
    if (node_ptr->GetType() == ge::NETOUTPUT) {
      if (ProcessNetoutputNodeDynShape(node_ptr) != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "[Call][ProcessNetoutputNodeDynShape] for node:%s failed", node_ptr->GetName().c_str());
        return FAILED;
      }
    }
  }
  return SUCCESS;
}
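
// Returns true if any data output of the given variable node feeds a TRANSDATA node.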
bool GraphPrepare::IsTansDataOpData(const ge::NodePtr &var_node) {
  for (auto &out_anchor : var_node->GetAllOutDataAnchors()) {
    GE_RT_FALSE_CHECK_NOTNULL(out_anchor);
    for (auto &in_anchor : out_anchor->GetPeerInDataAnchors()) {
      GE_RT_FALSE_CHECK_NOTNULL(in_anchor);
      ge::NodePtr dst_node = in_anchor->GetOwnerNode();
      GE_RT_FALSE_CHECK_NOTNULL(dst_node);
      if (dst_node->GetType() == TRANSDATA) {
        return true;
      }
    }
  }
  return false;
}
}  // namespace ge

The Graph Engine (GE) module is a submodule of MindSpore. It is implemented in C++ and sits between the front-end module ME and the underlying hardware, acting as a bridge between the two. GE takes the graph delivered by ME as input, performs a series of deep graph optimizations, and finally outputs a graph that can run efficiently on the underlying hardware. GE applies optimizations tailored to the hardware architecture of the Ascend AI processor so as to fully exploit its computing power. During model training and inference, GE is invoked automatically and is transparent to the user. GE consists mainly of two parts, GE API and GE Core; the detailed architecture diagram is shown below.
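
To make the GE API side of this workflow concrete, here is a minimal usage sketch based on the public GE client interface (ge::GEInitialize, ge::Session, AddGraph, RunGraph). The header path, option keys, and the way the input ge::Graph is built are assumptions and may differ between GE releases; the preparation passes in graph_preprocess.cc run inside GE Core when the graph is added and executed.

#include <map>
#include <string>
#include <vector>

#include "ge/ge_api.h"  // public GE API header; exact path may vary by release

int main() {
  // Initialize GE. The option map is left empty here; real deployments pass
  // options such as the target SoC version.
  std::map<std::string, std::string> ge_options;
  if (ge::GEInitialize(ge_options) != ge::SUCCESS) {
    return -1;
  }
  {
    ge::Session session(ge_options);
    ge::Graph graph("example_graph");  // assume the graph was built or parsed elsewhere
    const uint32_t graph_id = 1U;
    (void)session.AddGraph(graph_id, graph);  // GE Core prepares and optimizes the graph
    std::vector<ge::Tensor> inputs;           // fill to match the graph's Data nodes
    std::vector<ge::Tensor> outputs;
    (void)session.RunGraph(graph_id, inputs, outputs);  // compile and run on the device
  }  // destroy the session before finalizing GE
  (void)ge::GEFinalize();
  return 0;
}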