
hybrid_model_builder.cc
  1. /**
  2. * Copyright 2019-2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "hybrid/model/hybrid_model_builder.h"
  17. #include "common/math/math_util.h"
  18. #include "graph/ge_context.h"
  19. #include "graph/utils/node_utils.h"
  20. #include "graph/debug/ge_attr_define.h"
  21. #include "graph/load/new_model_manager/model_utils.h"
  22. #include "graph/manager/graph_var_manager.h"
  23. #include "graph/manager/trans_var_data_utils.h"
  24. #include "graph/utils/graph_utils.h"
  25. #include "graph/utils/type_utils.h"
  26. #include "hybrid/common/npu_memory_allocator.h"
  27. #include "hybrid/node_executor/node_executor.h"
  28. namespace ge {
  29. namespace hybrid {
  30. namespace {
  31. const uint32_t kSubgraphIndex = 0U;
  32. const uint32_t kVarOutputIndex = 0U;
  33. const uint32_t kAlignment = 32;
  34. const int kBytes = 8;
  35. int64_t CalcVarSizeInBytes(const GeTensorDesc &desc) {
  36. int64_t var_size = 0;
  37. auto data_type = desc.GetDataType();
  38. if (data_type == DT_STRING) {
  39. (void)TensorUtils::GetSize(desc, var_size);
  40. } else {
  41. var_size = GetSizeByDataType(data_type);
  42. if (var_size <= 0) {
  43. GELOGW("Failed to calc var data size from data type %s", TypeUtils::DataTypeToSerialString(data_type).c_str());
  44. return -1;
  45. }
  46. auto shape = desc.GetShape();
  47. auto dim_num = shape.GetDimNum();
  48. for (size_t dim_index = 0; dim_index < dim_num; ++dim_index) {
  49. var_size *= shape.GetDim(dim_index);
  50. }
  51. // pad up to a multiple of kAlignment, and add an extra kAlignment
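  // for example, with kAlignment = 32: var_size 1 -> 64, 33 -> 96, 65 -> 128 (the padded size always carries one extra 32-byte block)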
  52. var_size = (var_size + kAlignment * 2 - 1) / kAlignment * kAlignment;
  53. }
  54. return var_size;
  55. }
  56. } // namespace
  57. HybridModelBuilder::HybridModelBuilder(HybridModel &hybrid_model)
  58. : hybrid_model_(hybrid_model), runtime_param_(hybrid_model.root_runtime_param_) {
  59. ge_root_model_ = hybrid_model_.ge_root_model_;
  60. }
  61. Status HybridModelBuilder::Build() {
  62. GE_CHK_STATUS_RET(ValidateParams(), "Failed to validate GeRootModel");
  63. hybrid_model_.model_name_ = ge_root_model_->GetRootGraph()->GetName();
  64. GELOGI("[%s] Start to build hybrid model.", GetGraphName());
  65. GE_CHK_STATUS_RET(InitRuntimeParams(), "[%s] Failed to InitRuntimeParams", GetGraphName());
  66. GE_CHK_STATUS_RET(IndexSpecialNodes(), "[%s] Failed to index nodes", GetGraphName());
  67. GE_CHK_STATUS_RET(IndexTaskDefs(), "[%s] Failed to index task defs", GetGraphName());
  68. GE_CHK_STATUS_RET(LoadGraph(), "[%s] Failed to load graph", GetGraphName());
  69. GE_CHK_STATUS_RET(AssignUninitializedConstantOps(), "[%s] Failed to assign uninitialized constants", GetGraphName());
  70. GE_CHK_STATUS_RET(TransAllVarData(), "[%s] Failed to trans all var data", GetGraphName());
  71. GE_CHK_STATUS_RET(CopyVarData(), "[%s] Failed to copy var data", GetGraphName());
  72. GE_CHK_STATUS_RET(InitModelMem(), "[%s] Failed to init memory", GetGraphName());
  73. GE_CHK_STATUS_RET(InitWeights(), "[%s] Failed to init weights", GetGraphName());
  74. GE_CHK_STATUS_RET(InitConstantOps(), "[%s] Failed to init constant op", GetGraphName());
  75. GE_CHK_STATUS_RET(InitVariableTensors(), "[%s] Failed to init variables", GetGraphName());
  76. GE_CHK_STATUS_RET(LoadTasks(), "[%s] Failed to load tasks", GetGraphName());
  77. GELOGI("[%s] Done building hybrid model successfully.", GetGraphName());
  78. return SUCCESS;
  79. }
  80. Status HybridModelBuilder::ValidateParams() {
  81. GE_CHECK_NOTNULL(ge_root_model_);
  82. GE_CHECK_NOTNULL(ge_root_model_->GetRootGraph());
  83. return SUCCESS;
  84. }
  85. Status HybridModelBuilder::BuildNodeItem(const NodePtr &node, NodeItem &node_item) {
  86. auto op_desc = node->GetOpDesc();
  87. vector<string> dependencies = node->GetOpDesc()->GetOpInferDepends();
  88. GE_CHK_STATUS_RET(ParseDependentInputNodes(node_item, dependencies), "[%s] Failed to parse node dependencies.",
  89. node_item.NodeName().c_str());
  90. node_item.outputs.resize(node_item.num_outputs);
  91. for (int i = 0; i < node_item.num_outputs; ++i) {
  92. auto out_data_anchor = node->GetOutDataAnchor(i);
  93. if (out_data_anchor == nullptr) {
  94. GELOGE(INTERNAL_ERROR, "out anchor[%d] of node %s is nullptr", i, node->GetName().c_str());
  95. return INTERNAL_ERROR;
  96. }
  97. for (auto &dst_in_anchor : out_data_anchor->GetPeerInDataAnchors()) {
  98. auto dst_node = dst_in_anchor->GetOwnerNode();
  99. if (dst_node == nullptr) {
  100. GELOGW("dst node is nullptr. out anchor = %d", out_data_anchor->GetIdx());
  101. continue;
  102. }
  103. NodeItem *dst_node_item = nullptr;
  104. GE_CHK_STATUS_RET(GetOrCreateNodeItem(dst_node, &dst_node_item), "[%s] Failed to get or create node item.",
  105. dst_node->GetName().c_str());
  106. node_item.outputs[i].emplace_back(dst_in_anchor->GetIdx(), dst_node_item);
  107. }
  108. }
  109. GE_CHK_STATUS_RET_NOLOG(ResolveRefIo(node_item));
  110. return SUCCESS;
  111. }
  112. Status HybridModelBuilder::ResolveRefIo(NodeItem &node_item) {
  113. bool is_ref = false;
  114. auto &op_desc = *node_item.op_desc;
  115. (void)AttrUtils::GetBool(op_desc, ATTR_NAME_REFERENCE, is_ref);
  116. if (!is_ref) {
  117. return SUCCESS;
  118. }
  119. auto inputs = op_desc.GetAllInputName();
  120. auto outputs = op_desc.GetAllOutputName();
  121. for (auto &output : outputs) {
  122. for (auto &input : inputs) {
  123. if (input.first == output.first) {
  124. auto input_idx = static_cast<int>(input.second);
  125. auto output_idx = static_cast<int>(output.second);
  126. node_item.reuse_inputs[output_idx] = input_idx;
  127. GELOGD("[%s] Output[%d] reuse input[%d]", node_item.NodeName().c_str(), output_idx, input_idx);
  128. }
  129. }
  130. }
  131. return SUCCESS;
  132. }
  133. Status HybridModelBuilder::GetOrCreateNodeItem(const NodePtr &node, NodeItem **node_item) {
  134. auto &node_items = hybrid_model_.node_items_;
  135. auto it = node_items.find(node);
  136. if (it != node_items.end()) {
  137. *node_item = it->second.get();
  138. return SUCCESS;
  139. }
  140. auto new_node = std::unique_ptr<NodeItem>(new (std::nothrow) NodeItem(node));
  141. GE_CHECK_NOTNULL(new_node);
  142. GE_CHECK_NOTNULL(new_node->op_desc);
  143. GE_CHK_STATUS_RET(new_node->Init(), "Failed to init NodeItem [%s] .", node->GetName().c_str());
  144. GE_CHK_STATUS_RET_NOLOG(NodeExecutorManager::GetInstance().GetExecutor(*node, &new_node->node_executor));
  145. // we do not need L2 Buffer
  146. const char *const kIsFirstNode = "is_first_node";
  147. const char *const kIsLastNode = "is_last_node";
  148. (void)AttrUtils::SetBool(new_node->op_desc, kIsFirstNode, false);
  149. (void)AttrUtils::SetBool(new_node->op_desc, kIsLastNode, false);
  150. if (new_node->is_dynamic && (new_node->IsControlOp() || new_node->NodeType() == PARTITIONEDCALL)) {
  151. new_node->shape_inference_type = DEPEND_COMPUTE;
  152. }
  153. new_node->node_id = node_index;
  154. new_node->op_desc->SetId(node_index);
  155. node_index += 1;
  156. *node_item = new_node.get();
  157. node_items[node] = std::move(new_node);
  158. return SUCCESS;
  159. }
  160. Status HybridModelBuilder::ParseDependentInputNodes(NodeItem &node_item, const std::vector<string> &dependencies) {
  161. std::set<NodePtr> dependent_input_nodes;
  162. auto &ge_node = node_item.node;
  163. // The input tensors become valid after computation is done for parent nodes of type DEPEND_COMPUTE.
  164. // Wait for these parent nodes before execution.
  165. for (const auto &in_anchor : ge_node->GetAllInDataAnchors()) {
  166. const auto &peer_anchor = in_anchor->GetPeerOutAnchor();
  167. if (peer_anchor == nullptr) {
  168. GELOGD("[%s] Input[%d] do not have peer anchor", node_item.NodeName().c_str(), in_anchor->GetIdx());
  169. continue;
  170. }
  171. auto src_node = peer_anchor->GetOwnerNode();
  172. GE_CHECK_NOTNULL(src_node);
  173. auto src_node_item = MutableNodeItem(src_node);
  174. GE_CHECK_NOTNULL(src_node_item);
  175. if (src_node_item->shape_inference_type == DEPEND_COMPUTE) {
  176. GELOGD("[%s] Add input data dependent node [%s] due to inference type = DEPEND_COMPUTE",
  177. node_item.NodeName().c_str(), src_node_item->NodeName().c_str());
  178. src_node_item->has_observer = true;
  179. node_item.dependents_for_execution.emplace_back(src_node);
  180. }
  181. if (src_node_item->shape_inference_type == DEPEND_SHAPE_RANGE) {
  182. GELOGD("[%s] Add input shape dependent node [%s] due to inference type = DEPEND_SHAPE_RANGE",
  183. node_item.NodeName().c_str(), src_node_item->NodeName().c_str());
  184. src_node_item->has_observer = true;
  185. dependent_input_nodes.emplace(src_node);
  186. }
  187. }
  188. // cond or branch needs to be prepared before the execution of IF or CASE
  189. if (node_item.node_type == IF || node_item.node_type == CASE) {
  190. const auto &in_anchor = ge_node->GetInDataAnchor(0);
  191. GE_CHECK_NOTNULL(in_anchor);
  192. const auto &peer_anchor = in_anchor->GetPeerOutAnchor();
  193. GE_CHECK_NOTNULL(peer_anchor);
  194. auto src_node = peer_anchor->GetOwnerNode();
  195. GE_CHECK_NOTNULL(src_node);
  196. auto src_node_item = MutableNodeItem(src_node);
  197. GE_CHECK_NOTNULL(src_node_item);
  198. src_node_item->has_observer = true;
  199. node_item.dependents_for_execution.emplace_back(src_node);
  200. GELOGD("[%s] Dependent added from %s for control op's cond/branch", node_item.NodeName().c_str(),
  201. src_node_item->NodeName().c_str());
  202. }
  203. for (const auto &input_name : dependencies) {
  204. int input_index = node_item.op_desc->GetInputIndexByName(input_name);
  205. if (input_index < 0) {
  206. GELOGE(INTERNAL_ERROR, "[%s] Failed to get input index by name: %s", node_item.NodeName().c_str(),
  207. input_name.c_str());
  208. return INTERNAL_ERROR;
  209. }
  210. const auto &in_anchor = ge_node->GetInDataAnchor(input_index);
  211. GE_CHECK_NOTNULL(in_anchor);
  212. const auto &peer_out_anchor = in_anchor->GetPeerOutAnchor();
  213. GE_CHECK_NOTNULL(peer_out_anchor);
  214. const auto &src_node = peer_out_anchor->GetOwnerNode();
  215. GE_CHECK_NOTNULL(src_node);
  216. auto src_node_item = MutableNodeItem(src_node);
  217. src_node_item->to_const_output_id_list.emplace(peer_out_anchor->GetIdx());
  218. src_node_item->has_observer = true;
  219. dependent_input_nodes.emplace(src_node);
  220. GELOGD("[%s] Dependent added from output of [%s:%d]", node_item.NodeName().c_str(),
  221. src_node_item->NodeName().c_str(), peer_out_anchor->GetIdx());
  222. }
  223. for (const auto &dep_node : dependent_input_nodes) {
  224. node_item.dependents_for_shape_inference.emplace_back(dep_node);
  225. }
  226. return SUCCESS;
  227. }
  228. Status HybridModelBuilder::UpdateAnchorStatus(const NodePtr &node) {
  229. if (NodeUtils::SetAllAnchorStatus(node) != GRAPH_SUCCESS) {
  230. GELOGE(INTERNAL_ERROR, "[%s] NodeUtils::SetAllAnchorStatus failed.", node->GetName().c_str());
  231. return INTERNAL_ERROR;
  232. }
  233. for (auto &anchor : node->GetAllInDataAnchors()) {
  234. auto peer_anchor = anchor->GetPeerOutAnchor();
  235. if (peer_anchor == nullptr) {
  236. if (AnchorUtils::SetStatus(anchor, ANCHOR_SUSPEND) != GRAPH_SUCCESS) {
  237. GELOGE(INTERNAL_ERROR, "[%s] AnchorUtils::SetStatus failed.", node->GetName().c_str());
  238. return INTERNAL_ERROR;
  239. }
  240. } else if (peer_anchor->GetOwnerNode()->GetType() == CONSTANT) {
  241. if (AnchorUtils::SetStatus(anchor, ANCHOR_CONST) != GRAPH_SUCCESS) {
  242. GELOGE(INTERNAL_ERROR, "[%s] AnchorUtils::SetStatus failed.", node->GetName().c_str());
  243. return INTERNAL_ERROR;
  244. }
  245. } else {
  246. if (AnchorUtils::SetStatus(anchor, ANCHOR_DATA) != GRAPH_SUCCESS) {
  247. GELOGE(INTERNAL_ERROR, "[%s] AnchorUtils::SetStatus failed.", node->GetName().c_str());
  248. return INTERNAL_ERROR;
  249. }
  250. }
  251. }
  252. return SUCCESS;
  253. }
  254. Status HybridModelBuilder::DoUnlinkDataAnchors(const OutDataAnchorPtr &out_data_anchor,
  255. const InDataAnchorPtr &in_data_anchor) {
  256. GE_CHK_GRAPH_STATUS_RET(out_data_anchor->Unlink(in_data_anchor), "Failed to unlink %s:%d from %s:%d",
  257. out_data_anchor->GetOwnerNode()->GetName().c_str(), out_data_anchor->GetIdx(),
  258. in_data_anchor->GetOwnerNode()->GetName().c_str(), in_data_anchor->GetIdx());
  259. GELOGD("Succeeded in unlinking %s:%d from %s:%d", out_data_anchor->GetOwnerNode()->GetName().c_str(),
  260. out_data_anchor->GetIdx(), in_data_anchor->GetOwnerNode()->GetName().c_str(), in_data_anchor->GetIdx());
  261. return SUCCESS;
  262. }
  263. Status HybridModelBuilder::DoLinkDataAnchors(OutDataAnchorPtr &out_data_anchor, InDataAnchorPtr &in_data_anchor) {
  264. GE_CHK_GRAPH_STATUS_RET(out_data_anchor->LinkTo(in_data_anchor), "Failed to link %s:%d to %s:%d",
  265. out_data_anchor->GetOwnerNode()->GetName().c_str(), out_data_anchor->GetIdx(),
  266. in_data_anchor->GetOwnerNode()->GetName().c_str(), in_data_anchor->GetIdx());
  267. GELOGD("Succeeded in linking %s:%d to %s:%d", out_data_anchor->GetOwnerNode()->GetName().c_str(),
  268. out_data_anchor->GetIdx(), in_data_anchor->GetOwnerNode()->GetName().c_str(), in_data_anchor->GetIdx());
  269. return SUCCESS;
  270. }
  271. Status HybridModelBuilder::MergeInputNodes(ComputeGraph &graph) {
  272. const auto &wrapped_node = graph.GetParentNode();
  273. std::set<NodePtr> root_nodes;
  274. for (const auto &node : graph.GetDirectNode()) {
  275. GE_CHECK_NOTNULL(node);
  276. if (node->GetType() != DATA_TYPE) {
  277. if (node->GetInDataNodes().empty()) {
  278. root_nodes.emplace(node);
  279. }
  280. continue;
  281. }
  282. auto data_op_desc = node->GetOpDesc();
  283. GE_CHECK_NOTNULL(data_op_desc);
  284. uint32_t parent_index = 0;
  285. if (!AttrUtils::GetInt(data_op_desc, ATTR_NAME_PARENT_NODE_INDEX, parent_index)) {
  286. GELOGE(FAILED, "[%s] Failed to get attr [%s]", data_op_desc->GetName().c_str(),
  287. ATTR_NAME_PARENT_NODE_INDEX.c_str());
  288. return FAILED;
  289. }
  290. auto wrapped_node_in_anchor = wrapped_node->GetInDataAnchor(parent_index);
  291. GE_CHECK_NOTNULL(wrapped_node_in_anchor);
  292. auto src_out_anchor = wrapped_node_in_anchor->GetPeerOutAnchor();
  293. if (src_out_anchor == nullptr || src_out_anchor->GetOwnerNode() == nullptr) {
  294. continue;
  295. }
  296. auto src_node = wrapped_node_in_anchor->GetPeerOutAnchor()->GetOwnerNode();
  297. wrapped_node_in_anchor->UnlinkAll();
  298. // link src to outputs of DataNode
  299. for (auto &out_data_anchor : node->GetAllOutDataAnchors()) {
  300. GE_CHECK_NOTNULL(out_data_anchor);
  301. for (auto &peer_in_data_anchor : out_data_anchor->GetPeerInDataAnchors()) {
  302. auto dst_node = peer_in_data_anchor->GetOwnerNode();
  303. root_nodes.emplace(dst_node);
  304. GE_CHK_STATUS_RET_NOLOG(DoUnlinkDataAnchors(out_data_anchor, peer_in_data_anchor));
  305. GE_CHK_STATUS_RET_NOLOG(DoLinkDataAnchors(src_out_anchor, peer_in_data_anchor));
  306. }
  307. }
  308. }
  309. // transfer incoming control edges to all root nodes
  310. for (auto &root_node : root_nodes) {
  311. auto in_nodes = root_node->GetInAllNodes();
  312. std::set<NodePtr> in_node_set(in_nodes.begin(), in_nodes.end());
  313. for (auto &in_control_node : wrapped_node->GetInControlNodes()) {
  314. if (in_node_set.count(in_control_node) == 0) {
  315. GELOGD("[%s] Restore control edge to [%s]", in_control_node->GetName().c_str(), root_node->GetName().c_str());
  316. GE_CHECK_NOTNULL(in_control_node->GetOutControlAnchor());
  317. (void)in_control_node->GetOutControlAnchor()->LinkTo(root_node->GetInControlAnchor());
  318. }
  319. }
  320. }
  321. wrapped_node->GetInControlAnchor()->UnlinkAll();
  322. return SUCCESS;
  323. }
  324. Status HybridModelBuilder::MergeNetOutputNode(ComputeGraph &graph) {
  325. const auto &parent_node = graph.GetParentNode();
  326. const NodePtr &net_output_node = graph.FindFirstNodeMatchType(NETOUTPUT);
  327. GE_CHECK_NOTNULL(net_output_node);
  328. const auto &net_output_desc = net_output_node->GetOpDesc();
  329. GE_CHECK_NOTNULL(net_output_desc);
  330. auto all_in_nodes = net_output_node->GetInAllNodes();
  331. auto all_out_nodes = parent_node->GetOutAllNodes();
  332. net_output_node->GetInControlAnchor()->UnlinkAll();
  333. parent_node->GetOutControlAnchor()->UnlinkAll();
  334. for (const auto &in_data_anchor : net_output_node->GetAllInDataAnchors()) {
  335. auto src_out_anchor = in_data_anchor->GetPeerOutAnchor();
  336. GE_CHECK_NOTNULL(src_out_anchor);
  337. GE_CHK_STATUS_RET_NOLOG(DoUnlinkDataAnchors(src_out_anchor, in_data_anchor));
  338. auto index = in_data_anchor->GetIdx();
  339. auto input_desc = net_output_desc->MutableInputDesc(index);
  340. if (input_desc == nullptr) {
  341. GELOGE(INTERNAL_ERROR, "[%s] Failed to get input desc[%d]", net_output_desc->GetName().c_str(), index);
  342. return INTERNAL_ERROR;
  343. }
  344. uint32_t parent_index = 0;
  345. if (!AttrUtils::GetInt(input_desc, ATTR_NAME_PARENT_NODE_INDEX, parent_index)) {
  346. GELOGW("SubGraph: %s NetOutput input tensor %d, attr %s not found.", graph.GetName().c_str(), index,
  347. ATTR_NAME_PARENT_NODE_INDEX.c_str());
  348. continue;
  349. }
  350. const OutDataAnchorPtr &parent_out_anchor = parent_node->GetOutDataAnchor(parent_index);
  351. GE_CHECK_NOTNULL(parent_out_anchor);
  352. for (InDataAnchorPtr &dst_in_anchor : parent_out_anchor->GetPeerInDataAnchors()) {
  353. if (dst_in_anchor == nullptr) {
  354. continue;
  355. }
  356. GE_CHK_STATUS_RET_NOLOG(DoUnlinkDataAnchors(parent_out_anchor, dst_in_anchor));
  357. GE_CHK_STATUS_RET_NOLOG(DoLinkDataAnchors(src_out_anchor, dst_in_anchor));
  358. }
  359. }
  360. // transfer outgoing control edges
  361. std::set<NodePtr> in_node_set(all_in_nodes.begin(), all_in_nodes.end());
  362. std::set<NodePtr> out_node_set(all_out_nodes.begin(), all_out_nodes.end());
  363. for (auto &src_node : in_node_set) {
  364. GELOGD("[%s] process in node.", src_node->GetName().c_str());
  365. auto out_nodes = src_node->GetOutAllNodes();
  366. std::set<NodePtr> node_set(out_nodes.begin(), out_nodes.end());
  367. for (auto &dst_node : out_node_set) {
  368. if (node_set.count(dst_node) == 0) {
  369. src_node->GetOutControlAnchor()->LinkTo(dst_node->GetInControlAnchor());
  370. GELOGD("[%s] Restore control edge to [%s]", src_node->GetName().c_str(), dst_node->GetName().c_str());
  371. }
  372. }
  373. }
  374. return SUCCESS;
  375. }
  376. Status HybridModelBuilder::UnfoldSubgraphs(ComputeGraph &root_graph, ComputeGraphPtr &merged_graph) {
  377. merged_graph = MakeShared<ComputeGraph>("MergedGraph");
  378. for (const auto &node : root_graph.GetDirectNode()) {
  379. GE_CHECK_NOTNULL(node);
  380. auto op_desc = node->GetOpDesc();
  381. GE_CHECK_NOTNULL(op_desc);
  382. const auto &op_type = node->GetType();
  383. if (op_type != PARTITIONEDCALL) {
  384. merged_graph->AddNode(node);
  385. GELOGD("[%s] Node added to merged graph.", op_desc->GetName().c_str());
  386. continue;
  387. }
  388. bool is_unknown_shape = false;
  389. GE_CHK_GRAPH_STATUS_RET(NodeUtils::GetNodeUnknownShapeStatus(*node, is_unknown_shape),
  390. "Failed to invoke GetNodeUnknownShapeStatus.");
  391. if (!is_unknown_shape) {
  392. merged_graph->AddNode(node);
  393. GELOGD("[%s] Known shape partitioned call added to merged graph.", op_desc->GetName().c_str());
  394. continue;
  395. }
  396. auto subgraph = NodeUtils::GetSubgraph(*node, kSubgraphIndex);
  397. GE_CHECK_NOTNULL(subgraph);
  398. GE_CHK_GRAPH_STATUS_RET(UnfoldSubgraph(root_graph, *merged_graph, *subgraph), "[%s] Failed to merge subgraph.",
  399. subgraph->GetName().c_str());
  400. }
  401. // invoke TopologicalSorting before adding subgraphs, to avoid modifying node ids in the known-shaped subgraphs.
  402. GE_CHK_GRAPH_STATUS_RET(merged_graph->TopologicalSorting(), "Failed to invoke TopologicalSorting on merged graph.");
  403. for (auto &remained_subgraph : root_graph.GetAllSubgraphs()) {
  404. GELOGD("Adding subgraph [%s] to merged-graph.", remained_subgraph->GetName().c_str());
  405. GE_CHK_GRAPH_STATUS_RET(merged_graph->AddSubgraph(remained_subgraph), "Failed to add subgraph [%s]",
  406. remained_subgraph->GetName().c_str());
  407. }
  408. return SUCCESS;
  409. }
  410. Status HybridModelBuilder::UnfoldSubgraph(ComputeGraph &root_graph, ComputeGraph &parent_graph,
  411. ComputeGraph &sub_graph) {
  412. auto parent_node = sub_graph.GetParentNode();
  413. GE_CHECK_NOTNULL(parent_node);
  414. GE_CHK_STATUS_RET(MergeInputNodes(sub_graph), "[%s] Failed to merge data nodes for subgraph",
  415. sub_graph.GetName().c_str());
  416. GE_CHK_STATUS_RET(MergeNetOutputNode(sub_graph), "[%s] Failed to merge net output nodes for subgraph",
  417. sub_graph.GetName().c_str());
  418. GELOGD("[%s] Done merging subgraph inputs and outputs successfully.", sub_graph.GetName().c_str());
  419. for (auto &sub_node : sub_graph.GetDirectNode()) {
  420. auto sub_op_type = sub_node->GetType();
  421. if (sub_op_type == DATA_TYPE || sub_op_type == NETOUTPUT) {
  422. continue;
  423. }
  424. if (sub_op_type == CONSTANT || sub_op_type == VARIABLE) {
  425. GELOGE(INTERNAL_ERROR, "Unexpected node in unknown subgraph. type = %s, node = %s::%s", sub_op_type.c_str(),
  426. sub_graph.GetName().c_str(), sub_node->GetName().c_str());
  427. return INTERNAL_ERROR;
  428. }
  429. if (sub_op_type == PARTITIONEDCALL) {
  430. bool is_unknown_shape = false;
  431. GE_CHK_GRAPH_STATUS_RET(NodeUtils::GetNodeUnknownShapeStatus(*sub_node, is_unknown_shape),
  432. "[%s] Failed to invoke GetNodeUnknownShapeStatus.", sub_node->GetName().c_str());
  433. if (is_unknown_shape) {
  434. auto sub_sub_graph = NodeUtils::GetSubgraph(*sub_node, kSubgraphIndex);
  435. GE_CHECK_NOTNULL(sub_sub_graph);
  436. GE_CHK_STATUS_RET(UnfoldSubgraph(root_graph, parent_graph, *sub_sub_graph), "[%s] Failed to merge subgraph",
  437. sub_sub_graph->GetName().c_str());
  438. continue;
  439. }
  440. }
  441. parent_graph.AddNode(sub_node);
  442. GELOGD("[%s::%s] added to parent graph: [%s].", sub_graph.GetName().c_str(), sub_node->GetName().c_str(),
  443. parent_graph.GetName().c_str());
  444. }
  445. GELOGD("[%s] Done merging subgraph. remove it from root graph.", sub_graph.GetName().c_str());
  446. root_graph.RemoveSubgraph(sub_graph.GetName());
  447. return SUCCESS;
  448. }
  449. Status HybridModelBuilder::BuildOutputMapping(GraphItem &graph_item, const NodeItem &node_item, bool is_root_graph) {
  450. auto output_size = node_item.op_desc->GetAllInputsSize();
  451. GE_CHECK_LE(output_size, UINT32_MAX);
  452. graph_item.output_edges_.resize(output_size);
  453. for (auto &in_data_anchor : node_item.node->GetAllInDataAnchors()) {
  454. auto peer_out_anchor = in_data_anchor->GetPeerOutAnchor();
  455. GE_CHECK_NOTNULL(peer_out_anchor);
  456. auto src_node = peer_out_anchor->GetOwnerNode();
  457. GE_CHECK_NOTNULL(src_node);
  458. auto src_node_item = GetNodeItem(src_node);
  459. GE_CHECK_NOTNULL(src_node_item);
  460. auto output_offset = src_node_item->output_start + peer_out_anchor->GetIdx();
  461. GELOGI("Output[%d], node = %s, output_index = %d, output_offset = %d ", in_data_anchor->GetIdx(),
  462. src_node_item->NodeName().c_str(), peer_out_anchor->GetIdx(), output_offset);
  463. graph_item.output_edges_[in_data_anchor->GetIdx()] = {src_node_item, peer_out_anchor->GetIdx()};
  464. }
  465. if (!is_root_graph) {
  466. for (uint32_t i = 0; i < static_cast<uint32_t>(output_size); ++i) {
  467. uint32_t p_index = i;
  468. // NetOutput of a while subgraph does not have a parent index
  469. if (AttrUtils::GetInt(node_item.op_desc->GetInputDesc(i), ATTR_NAME_PARENT_NODE_INDEX, p_index)) {
  470. GELOGD("[%s] Parent index not set for input[%u].", node_item.NodeName().c_str(), i);
  471. }
  472. graph_item.output_index_mapping_.emplace_back(p_index);
  473. }
  474. }
  475. return SUCCESS;
  476. }
  477. Status HybridModelBuilder::LoadGraph() {
  478. auto root_graph = ge_root_model_->GetRootGraph();
  479. if (!GetContext().GetHostExecFlag()) {
  480. std::shared_ptr<ComputeGraph> merged_graph;
  481. GELOGI("Before merging subgraphs DirectNodesSize = %zu, GetAllNodesSize = %zu", root_graph->GetDirectNodesSize(),
  482. root_graph->GetAllNodesSize());
  483. GE_CHK_GRAPH_STATUS_RET(UnfoldSubgraphs(*root_graph, merged_graph), "Failed to unfold subgraphs.");
  484. root_graph = std::move(merged_graph);
  485. GELOGI("After merging subgraphs DirectNodesSize = %zu, GetAllNodesSize = %zu", root_graph->GetDirectNodesSize(),
  486. root_graph->GetAllNodesSize());
  487. GE_DUMP(root_graph, "hybrid_merged_graph");
  488. }
  489. GE_CHK_STATUS_RET(LoadDynamicSubgraph(*root_graph, true), "Failed to load root graph.");
  490. GELOGD("Done loading root graph successfully.");
  491. for (auto &sub_graph : root_graph->GetAllSubgraphs()) {
  492. GE_CHECK_NOTNULL(sub_graph);
  493. GELOGD("Start to load subgraph [%s]", sub_graph->GetName().c_str());
  494. auto parent_node = sub_graph->GetParentNode();
  495. GE_CHECK_NOTNULL(parent_node);
  496. auto parent_node_item = MutableNodeItem(parent_node);
  497. // parent node is in another known subgraph
  498. if (parent_node_item == nullptr) {
  499. GELOGD("[%s] Subgraph is in another known shaped subgraph, skip it.", sub_graph->GetName().c_str());
  500. continue;
  501. }
  502. if (sub_graph->GetGraphUnknownFlag()) {
  503. GE_CHK_STATUS_RET(LoadDynamicSubgraph(*sub_graph, false), "Failed to load subgraph: [%s]",
  504. sub_graph->GetName().c_str());
  505. } else {
  506. GE_CHK_STATUS_RET(IdentifyVariableOutputs(*parent_node_item), "[%s] Failed to identify ref outputs.",
  507. parent_node_item->NodeName().c_str());
  508. // if the parent is a function control op, we need to add a virtual partitioned call
  509. if (parent_node_item->IsControlOp()) {
  510. GE_CHK_STATUS_RET(LoadKnownShapedSubgraph(*sub_graph, parent_node_item),
  511. "Failed to load function control op subgraph [%s]", sub_graph->GetName().c_str());
  512. }
  513. }
  514. }
  515. GELOGI("Done loading all subgraphs successfully.");
  516. return SUCCESS;
  517. }
  518. const NodeItem *HybridModelBuilder::GetNodeItem(const NodePtr &node) const { return hybrid_model_.GetNodeItem(node); }
  519. NodeItem *HybridModelBuilder::MutableNodeItem(const NodePtr &node) { return hybrid_model_.MutableNodeItem(node); }
  520. Status HybridModelBuilder::VarNodeToTensor(const NodePtr &var_node, std::unique_ptr<TensorValue> &tensor) {
  521. string var_name = var_node->GetName();
  522. auto tensor_desc = var_node->GetOpDesc()->MutableOutputDesc(0);
  523. uint8_t *var_logic = nullptr;
  524. GE_CHK_STATUS_RET(var_manager_->GetVarAddr(var_name, *tensor_desc, &var_logic),
  525. "Failed to get var addr. var_name = %s, session_id = %ld", var_name.c_str(),
  526. hybrid_model_.GetSessionId());
  527. uint8_t *dev_mem = var_manager_->GetVarMemoryAddr(var_logic, RT_MEMORY_HBM);
  528. if (dev_mem == nullptr) {
  529. GELOGE(INTERNAL_ERROR,
  530. "Failed to copy var %s from device, cant not get "
  531. "var addr from logic addr %p",
  532. var_node->GetName().c_str(), var_logic);
  533. return INTERNAL_ERROR;
  534. }
  535. int64_t var_size = CalcVarSizeInBytes(*tensor_desc);
  536. // var size is only used for checking; no memory is allocated based on it
  537. tensor.reset(new (std::nothrow) TensorValue(dev_mem, static_cast<size_t>(var_size)));
  538. GE_CHECK_NOTNULL(tensor);
  539. return SUCCESS;
  540. }
  541. Status HybridModelBuilder::HandleDtString(const GeTensor &tensor, void *var_addr) {
  542. auto desc = tensor.GetTensorDesc();
  543. if (desc.GetDataType() == DT_STRING) {
  544. GeShape tensor_shape = desc.GetShape();
  545. /// if the tensor is a scalar, its shape size is zero, according to ge_tensor.cc.
  546. /// the logic of GetShapeSize is flawed: a scalar tensor's GetShapeSize is zero,
  547. /// and that of an unknown shape is zero too.
  548. /// unknown shapes will not appear here, so zero can be used to judge whether a tensor is a scalar
  549. int64_t elem_num = tensor_shape.GetShapeSize();
  550. if (elem_num == 0 && tensor_shape.GetDims().empty()) {
  551. elem_num = 1;
  552. }
  553. auto &mutable_tensor = const_cast<GeTensor &>(tensor);
  554. uint64_t *buff = reinterpret_cast<uint64_t *>(mutable_tensor.MutableData().data());
  555. GE_CHK_BOOL_RET_STATUS(ge::CheckInt64Uint32MulOverflow(elem_num, kBytes) == SUCCESS, FAILED,
  556. "Shape size is invalid");
  557. auto offset = static_cast<uint64_t>(elem_num * kBytes);
  558. auto hbm_raw_data_base_addr = reinterpret_cast<uint64_t>(reinterpret_cast<uintptr_t>(var_addr) + offset);
  559. for (int64_t i = elem_num - 1; i >= 0; --i) {
  560. buff[i] = hbm_raw_data_base_addr + (buff[i] - buff[0]);
  561. }
  562. }
  563. return SUCCESS;
  564. }
  565. Status HybridModelBuilder::AssignUninitializedConstantOps() {
  566. if (GetContext().GetHostExecFlag()) {
  567. GELOGI("no need to assign when exec on host.");
  568. return SUCCESS;
  569. }
  570. for (auto &it : hybrid_model_.constant_op_nodes_) {
  571. const string &var_name = it.first;
  572. const NodePtr &var_node = it.second;
  573. auto tensor_desc = var_node->GetOpDesc()->MutableOutputDesc(0);
  574. if (!var_manager_->IsVarExist(var_name, *tensor_desc)) {
  575. // allocate constant
  576. GELOGD("[%s] Constant not allocated during graph building. now allocate it.", var_name.c_str());
  577. GE_CHK_STATUS_RET(var_manager_->AssignVarMem(var_name, *tensor_desc, RT_MEMORY_HBM));
  578. GE_CHK_STATUS_RET(var_manager_->SetAllocatedGraphId(var_name, runtime_param_.graph_id));
  579. }
  580. }
  581. return SUCCESS;
  582. }
  583. Status HybridModelBuilder::InitConstantOps() {
  584. for (auto &it : hybrid_model_.constant_op_nodes_) {
  585. const string &var_name = it.first;
  586. const NodePtr &var_node = it.second;
  587. std::unique_ptr<TensorValue> var_tensor;
  588. GE_CHK_STATUS_RET_NOLOG(VarNodeToTensor(var_node, var_tensor));
  589. GELOGD("Init const op tensor. name = %s, size = %ld", var_name.c_str(), var_tensor->GetSize());
  590. var_tensor->SetName("ConstOp_" + var_name);
  591. auto op_desc = var_node->GetOpDesc();
  592. auto v_weights = ModelUtils::GetWeights(op_desc);
  593. auto v_output_size = var_tensor->GetSize();
  594. auto v_output_addr = var_tensor->MutableData();
  595. auto *ge_tensor = const_cast<GeTensor *>(v_weights[0].get());
  596. if (ge_tensor->GetData().size() > 0) {
  597. GE_CHK_STATUS_RET_NOLOG(HandleDtString(*ge_tensor, v_output_addr));
  598. GELOGI("[IMAS]InitConstant memcpy graph_%u type[V] name[%s] output[%d] memaddr[%p] mem_size[%zu] datasize[%zu]",
  599. runtime_param_.graph_id, op_desc->GetName().c_str(), 0, v_output_addr, v_output_size,
  600. ge_tensor->GetData().size());
  601. GE_CHK_RT_RET(rtMemcpy(v_output_addr, v_output_size, ge_tensor->GetData().data(), ge_tensor->GetData().size(),
  602. RT_MEMCPY_HOST_TO_DEVICE));
  603. } else {
  604. GELOGI("[%s] Const op has no weight data.", op_desc->GetName().c_str());
  605. }
  606. hybrid_model_.variable_tensors_.emplace(var_name, std::move(var_tensor));
  607. }
  608. return SUCCESS;
  609. }
  610. Status HybridModelBuilder::InitVariableTensors() {
  611. for (auto &it : hybrid_model_.variable_nodes_) {
  612. string var_name = it.first;
  613. NodePtr &var_node = it.second;
  614. std::unique_ptr<TensorValue> tensor;
  615. GE_CHK_STATUS_RET_NOLOG(VarNodeToTensor(var_node, tensor));
  616. GELOGD("Init variable tensor. name = %s, size = %ld, addr = %p", var_name.c_str(), tensor->GetSize(),
  617. tensor->GetData());
  618. tensor->SetName("Var_" + var_name);
  619. hybrid_model_.variable_tensors_.emplace(var_name, std::move(tensor));
  620. }
  621. return SUCCESS;
  622. }
  623. Status HybridModelBuilder::InitWeights() {
  624. // Training models do not have weights (only ConstOp).
  625. return SUCCESS;
  626. }
  627. Status HybridModelBuilder::LoadTasks() {
  628. for (auto &it : hybrid_model_.node_items_) {
  629. auto &node_item = it.second;
  630. auto &node_ptr = node_item->node;
  631. if (node_item->node_type == NETOUTPUT) {
  632. continue;
  633. }
  634. GELOGD("[%s] Start to build kernel task", node_ptr->GetName().c_str());
  635. auto load_ret = node_item->node_executor->LoadTask(hybrid_model_, node_ptr, node_item->kernel_task);
  636. if (load_ret != UNSUPPORTED && load_ret != SUCCESS) {
  637. GELOGE(load_ret, "[%s] Failed to load task", node_ptr->GetName().c_str());
  638. return load_ret;
  639. }
  640. GELOGD("[%s] Done loading task successfully.", node_ptr->GetName().c_str());
  641. }
  642. return SUCCESS;
  643. }
  644. Status HybridModelBuilder::LoadGeModel(ComputeGraph &sub_graph, const GeModelPtr &ge_model) {
  645. auto parent_node = sub_graph.GetParentNode();
  646. GE_CHECK_NOTNULL(parent_node);
  647. auto op_type = parent_node->GetType();
  648. if (op_type == IF || op_type == CASE || op_type == WHILE) {
  649. GELOGD("Set ge_model for control op subgraph: [%s], task_size = %d", sub_graph.GetName().c_str(),
  650. ge_model->GetModelTaskDefPtr()->task_size());
  651. subgraph_models_.emplace(sub_graph.GetName(), ge_model);
  652. } else {
  653. GELOGD("Set ge_model for subgraph: [%s], task_size = %d", sub_graph.GetName().c_str(),
  654. ge_model->GetModelTaskDefPtr()->task_size());
  655. hybrid_model_.known_shape_sub_models_.emplace(sub_graph.GetParentNode(), ge_model);
  656. }
  657. return SUCCESS;
  658. }
  659. Status HybridModelBuilder::IndexTaskDefs() {
  660. const auto &root_graph = ge_root_model_->GetRootGraph();
  661. for (auto &it : ge_root_model_->GetSubgraphInstanceNameToModel()) {
  662. auto &name = it.first;
  663. auto &ge_model = it.second;
  664. GE_CHECK_NOTNULL(ge_model);
  665. const auto &sub_graph = root_graph->GetSubgraph(name);
  666. if (sub_graph == nullptr) {
  667. continue;
  668. }
  669. bool is_unknown_shape = sub_graph->GetGraphUnknownFlag();
  670. if (!is_unknown_shape) {
  671. GE_CHK_STATUS_RET_NOLOG(LoadGeModel(*sub_graph, ge_model));
  672. continue;
  673. }
  674. // index task defs
  675. GELOGD("To index tasks for subgraph: %s", name.c_str());
  676. unordered_map<int64_t, NodePtr> node_map;
  677. for (const auto &node : sub_graph->GetDirectNode()) {
  678. GE_CHECK_NOTNULL(node);
  679. GE_CHECK_NOTNULL(node->GetOpDesc());
  680. auto node_id = node->GetOpDesc()->GetId();
  681. GELOGD("op_index = %ld, node_name = %s", node_id, node->GetName().c_str());
  682. node_map.emplace(node_id, node);
  683. }
  684. auto tasks = ge_model->GetModelTaskDefPtr()->task();
  685. for (int i = 0; i < tasks.size(); ++i) {
  686. const domi::TaskDef &task_def = tasks[i];
  687. GELOGI("Task id = %d, task type = %d", i, task_def.type());
  688. auto task_type = static_cast<rtModelTaskType_t>(task_def.type());
  689. uint32_t op_index = -1;
  690. if (task_type == RT_MODEL_TASK_KERNEL) {
  691. op_index = task_def.kernel().context().op_index();
  692. } else if (task_type == RT_MODEL_TASK_KERNEL_EX) {
  693. op_index = task_def.kernel_ex().op_index();
  694. } else if (task_type == RT_MODEL_TASK_HCCL) {
  695. op_index = task_def.kernel_hccl().op_index();
  696. } else {
  697. GELOGD("Skip task type: %d", static_cast<int>(task_type));
  698. continue;
  699. }
  700. auto iter = node_map.find(op_index);
  701. if (iter == node_map.end()) {
  702. GELOGE(INTERNAL_ERROR, "Failed to get node by index = %u", op_index);
  703. return INTERNAL_ERROR;
  704. }
  705. auto &node = iter->second;
  706. if (task_type == RT_MODEL_TASK_KERNEL) {
  707. ge_model->GetTBEKernelStore().LoadTBEKernelBinToOpDesc(node->GetOpDesc());
  708. }
  709. GELOGD("Task loaded for node: %s, task type = %d, op_index = %u", node->GetName().c_str(), task_type, op_index);
  710. hybrid_model_.task_defs_[node].emplace_back(task_def);
  711. }
  712. }
  713. return SUCCESS;
  714. }
  715. Status HybridModelBuilder::IndexSpecialNodes() {
  716. GELOGD("Start to index special nodes");
  717. const auto &root_graph = ge_root_model_->GetRootGraph();
  718. for (auto &node : root_graph->GetAllNodes()) {
  719. GE_CHECK_NOTNULL(node);
  720. GE_CHECK_NOTNULL(node->GetOpDesc());
  721. auto op_type = node->GetType();
  722. GELOGD("node name = %s, node type = %s", node->GetName().c_str(), node->GetType().c_str());
  723. if (op_type == VARIABLE) {
  724. hybrid_model_.variable_nodes_.emplace(node->GetName(), node);
  725. } else if (op_type == CONSTANTOP) {
  726. hybrid_model_.constant_op_nodes_.emplace(node->GetName(), node);
  727. } else if (op_type == DATA && node->GetOwnerComputeGraph() != root_graph) {
  728. NodePtr src_node;
  729. int peer_out_index = -1;
  730. GE_CHK_STATUS_RET_NOLOG(GetPeerNodeAcrossSubGraphs(node, src_node, peer_out_index));
  731. GELOGD("Got peer node for data node %s, peer node = %s(%s)", node->GetName().c_str(), src_node->GetName().c_str(),
  732. src_node->GetType().c_str());
  733. auto src_op_type = src_node->GetType();
  734. if (src_op_type == CONSTANTOP || src_op_type == VARIABLE) {
  735. for (auto &dst_node_and_in_anchor : node->GetOutDataNodesAndAnchors()) {
  736. auto &dst_node = dst_node_and_in_anchor.first;
  737. auto &in_anchor = dst_node_and_in_anchor.second;
  738. node_ref_inputs_[dst_node].emplace_back(std::make_pair(in_anchor->GetIdx(), src_node));
  739. }
  740. }
  741. }
  742. }
  743. return SUCCESS;
  744. }
  745. Status HybridModelBuilder::GetPeerNodeAcrossSubGraphs(const NodePtr &data_node, NodePtr &peer_node,
  746. int &peer_out_index) {
  747. auto sub_graph = data_node->GetOwnerComputeGraph();
  748. GE_CHECK_NOTNULL(sub_graph);
  749. GELOGD("To get peer node of %s::%s", sub_graph->GetName().c_str(), data_node->GetName().c_str());
  750. auto wrapped_node = data_node->GetOwnerComputeGraph()->GetParentNode();
  751. if (wrapped_node == nullptr) {
  752. GELOGE(INTERNAL_ERROR, "[%s] Node is in root graph.", data_node->GetName().c_str());
  753. return INTERNAL_ERROR;
  754. }
  755. auto data_op_desc = data_node->GetOpDesc();
  756. uint32_t parent_index = 0;
  757. if (!AttrUtils::GetInt(data_op_desc, ATTR_NAME_PARENT_NODE_INDEX, parent_index)) {
  758. GELOGE(INTERNAL_ERROR, "[%s] Failed to get attr [%s]", data_op_desc->GetName().c_str(),
  759. ATTR_NAME_PARENT_NODE_INDEX.c_str());
  760. return INTERNAL_ERROR;
  761. }
  762. auto wrapped_node_in_anchor = wrapped_node->GetInDataAnchor(parent_index);
  763. GE_CHECK_NOTNULL(wrapped_node_in_anchor);
  764. auto src_out_anchor = wrapped_node_in_anchor->GetPeerOutAnchor();
  765. if (src_out_anchor == nullptr || src_out_anchor->GetOwnerNode() == nullptr) {
  766. GELOGE(INTERNAL_ERROR, "[%s] Parent node do not have peer anchor.", data_node->GetName().c_str());
  767. return INTERNAL_ERROR;
  768. }
  769. auto src_wrapped_node_out_anchor = wrapped_node_in_anchor->GetPeerOutAnchor();
  770. GE_CHECK_NOTNULL(src_wrapped_node_out_anchor);
  771. auto src_wrapped_node = src_wrapped_node_out_anchor->GetOwnerNode();
  772. GE_CHECK_NOTNULL(src_wrapped_node);
  773. // connected to root-graph's DATA
  774. auto src_node_type = src_wrapped_node->GetType();
  775. if (src_node_type != PARTITIONEDCALL) {
  776. peer_node = src_wrapped_node;
  777. peer_out_index = kVarOutputIndex;
  778. GELOGD("[%s] Node is connected to root graph's node: %s", data_node->GetName().c_str(),
  779. peer_node->GetName().c_str());
  780. return SUCCESS;
  781. }
  782. auto src_graph = NodeUtils::GetSubgraph(*src_wrapped_node, kSubgraphIndex);
  783. GE_CHECK_NOTNULL(src_graph);
  784. auto src_net_output_node = src_graph->FindFirstNodeMatchType(NETOUTPUT);
  785. GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(src_net_output_node == nullptr, return INTERNAL_ERROR,
  786. "Failed to find NetOutput in subgraph: %s", src_graph->GetName().c_str());
  787. auto net_output_desc = src_net_output_node->GetOpDesc();
  788. GE_CHECK_NOTNULL(net_output_desc);
  789. auto out_index = static_cast<uint32_t>(src_wrapped_node_out_anchor->GetIdx());
  790. GELOGD("src graph = %s, src parent output index = %d", src_graph->GetName().c_str(), out_index);
  791. // link src to outputs of DataNode
  792. auto input_size = net_output_desc->GetAllInputsSize();
  793. GE_CHECK_LE(input_size, UINT32_MAX);
  794. for (uint32_t i = 0; i < static_cast<uint32_t>(input_size); ++i) {
  795. uint32_t p_index = 0;
  796. if (!AttrUtils::GetInt(net_output_desc->GetInputDesc(i), ATTR_NAME_PARENT_NODE_INDEX, p_index)) {
  797. GELOGW("SubGraph: %s input tensor %u attr %s not found.", src_graph->GetName().c_str(), i,
  798. ATTR_NAME_PARENT_NODE_INDEX.c_str());
  799. continue;
  800. }
  801. GELOGD("NetOutput's input[%u], parent_node_index = %u", i, p_index);
  802. if (p_index == out_index) {
  803. auto in_anchor = src_net_output_node->GetInDataAnchor(i);
  804. GE_CHECK_NOTNULL(in_anchor);
  805. auto peer_out_anchor = in_anchor->GetPeerOutAnchor();
  806. GE_CHECK_NOTNULL(peer_out_anchor);
  807. peer_node = peer_out_anchor->GetOwnerNode();
  808. GE_CHECK_NOTNULL(peer_node);
  809. peer_out_index = peer_out_anchor->GetIdx();
  810. GELOGD("Found peer node of Data node: %s::%s is %s::%s", sub_graph->GetName().c_str(),
  811. data_node->GetName().c_str(), src_graph->GetName().c_str(), peer_node->GetName().c_str());
  812. return SUCCESS;
  813. }
  814. }
  815. GELOGE(FAILED, "Failed to find peer node for %s::%s", sub_graph->GetName().c_str(), data_node->GetName().c_str());
  816. return FAILED;
  817. }
  818. Status HybridModelBuilder::InitRuntimeParams() {
  819. int64_t value = 0;
  820. bool ret = false;
  821. if (ge_root_model_->GetSubgraphInstanceNameToModel().empty()) {
  822. GELOGE(INTERNAL_ERROR, "Root model has no sub model");
  823. return INTERNAL_ERROR;
  824. }
  825. // session id and var size are the same for every model
  826. auto first_model = ge_root_model_->GetSubgraphInstanceNameToModel().begin()->second;
  827. ret = ge::AttrUtils::GetInt(first_model, ge::MODEL_ATTR_SESSION_ID, value);
  828. runtime_param_.session_id = ret ? static_cast<uint64_t>(value) : 0;
  829. ret = ge::AttrUtils::GetInt(first_model, ATTR_MODEL_TASK_GEN_VAR_ADDR, value);
  830. runtime_param_.logic_var_base = ret ? static_cast<uint64_t>(value) : 0;
  831. runtime_param_.graph_id = ge_root_model_->GetRootGraph()->GetGraphID();
  832. value = 0;
  833. for (auto &it : ge_root_model_->GetSubgraphInstanceNameToModel()) {
  834. (void)ge::AttrUtils::GetInt(it.second, ATTR_MODEL_VAR_SIZE, value);
  835. if (value > 0) {
  836. runtime_param_.var_size = static_cast<uint64_t>(value);
  837. break;
  838. }
  839. }
  840. GELOGI("InitRuntimeParams(), session_id:%lu, var_size:%lu. graph_id = %u", runtime_param_.session_id,
  841. runtime_param_.var_size, runtime_param_.graph_id);
  842. var_manager_ = VarManager::Instance(runtime_param_.session_id);
  843. GE_CHECK_NOTNULL(var_manager_);
  844. return SUCCESS;
  845. }
  846. Status HybridModelBuilder::IdentifyVariableOutputs(NodeItem &node_item) {
  847. GELOGD("Start to parse outputs of node: %s", node_item.NodeName().c_str());
  848. auto subgraph = NodeUtils::GetSubgraph(*node_item.node, kSubgraphIndex);
  849. GE_CHECK_NOTNULL(subgraph);
  850. auto net_output_node = subgraph->FindFirstNodeMatchType(NETOUTPUT);
  851. if (net_output_node == nullptr) {
  852. GELOGD("[%s] Subgraph do not got net output", subgraph->GetName().c_str());
  853. return SUCCESS;
  854. }
  855. auto net_output_desc = net_output_node->GetOpDesc();
  856. GE_CHECK_NOTNULL(net_output_desc);
  857. // constant/variable connected to net output
  858. for (const auto &in_data_anchor : net_output_node->GetAllInDataAnchors()) {
  859. auto src_node = GetPeerNode(in_data_anchor);
  860. GE_CHECK_NOTNULL(src_node);
  861. auto src_op_type = src_node->GetType();
  862. GELOGD("Node %s, output %d, src node = %s, src node type = %s", node_item.NodeName().c_str(),
  863. in_data_anchor->GetIdx(), src_node->GetName().c_str(), src_op_type.c_str());
  864. if (src_op_type != CONSTANTOP && src_op_type != VARIABLE) {
  865. continue;
  866. }
  867. uint32_t parent_index = 0;
  868. GE_CHK_STATUS_RET_NOLOG(GetParentNodeOutputIndex(*net_output_desc, in_data_anchor->GetIdx(), parent_index));
  869. GELOGD("Got parent output index = %u", parent_index);
  870. node_item.ref_outputs.emplace(parent_index, src_node);
  871. }
  872. // Data nodes marked with REF_VAR_SRC_VAR_NAME
  873. // Using variable tensor as data's output
  874. for (auto &node : subgraph->GetDirectNode()) {
  875. if (node->GetType() != DATA) {
  876. continue;
  877. }
  878. string ref_var_name;
  879. (void)AttrUtils::GetStr(node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, ref_var_name);
  880. if (ref_var_name.empty()) {
  881. continue;
  882. }
  883. GELOGD("Data node ref to variable: %s", ref_var_name.c_str());
  884. NodePtr src_node;
  885. auto var_node = hybrid_model_.GetVariableNode(ref_var_name);
  886. GE_CHECK_NOTNULL(var_node);
  887. GELOGD("Found var node [%s] by ref_var_name [%s]", var_node->GetName().c_str(), ref_var_name.c_str());
  888. int peer_output_index = -1;
  889. GE_CHK_STATUS_RET_NOLOG(GetPeerNodeAcrossSubGraphs(node, src_node, peer_output_index));
  890. auto src_node_item = MutableNodeItem(src_node);
  891. GE_CHECK_NOTNULL(src_node_item);
  892. src_node_item->ref_outputs.emplace(peer_output_index, var_node);
  893. }
  894. return SUCCESS;
  895. }
  896. NodePtr HybridModelBuilder::GetPeerNode(const InDataAnchorPtr &in_data_anchor) {
  897. auto peer_out_anchor = in_data_anchor->GetPeerOutAnchor();
  898. if (peer_out_anchor != nullptr) {
  899. return peer_out_anchor->GetOwnerNode();
  900. }
  901. return nullptr;
  902. }
  903. Status HybridModelBuilder::GetParentNodeOutputIndex(const OpDesc &op_desc, int index, uint32_t &out_index) {
  904. auto input_desc = op_desc.MutableInputDesc(index);
  905. GE_CHECK_NOTNULL(input_desc);
  906. if (!AttrUtils::GetInt(input_desc, ATTR_NAME_PARENT_NODE_INDEX, out_index)) {
  907. GELOGE(INTERNAL_ERROR, "NetOutput input tensor %d, attr %s not found.", index, ATTR_NAME_PARENT_NODE_INDEX.c_str());
  908. return INTERNAL_ERROR;
  909. }
  910. return SUCCESS;
  911. }
  912. Status HybridModelBuilder::InitModelMem() {
  913. hybrid_model_.var_mem_base_ = var_manager_->GetVarMemoryBase(RT_MEMORY_HBM);
  914. auto total_var_size = hybrid_model_.TotalVarMemSize();
  915. if (total_var_size == 0 && !hybrid_model_.constant_op_nodes_.empty()) {
  916. total_var_size = var_manager_->GetVarMemSize(RT_MEMORY_HBM) > 0 ? var_manager_->GetVarMemMaxSize() : 0;
  917. GELOGD("Model var size = 0. but got uninitialized constant. set var size to %zu.", total_var_size);
  918. }
  919. if (total_var_size > 0 && hybrid_model_.var_mem_base_ == nullptr) {
  920. GE_CHK_STATUS_RET(var_manager_->MallocVarMemory(total_var_size), "Malloc Var Memory Fail.");
  921. hybrid_model_.var_mem_base_ = var_manager_->GetVarMemoryBase(RT_MEMORY_HBM);
  922. }
  923. runtime_param_.var_base = hybrid_model_.var_mem_base_;
  924. return SUCCESS;
  925. }
  926. Status HybridModelBuilder::TransAllVarData() {
  927. GELOGI("TransAllVarData start: session_id:%lu, graph_id: %u.", runtime_param_.session_id, runtime_param_.graph_id);
  928. rtContext_t ctx = nullptr;
  929. rtError_t rt_ret = rtCtxGetCurrent(&ctx);
  930. if (rt_ret != RT_ERROR_NONE) {
  931. GELOGE(RT_FAILED, "Failed to get current context, error_code is: 0x%X.", rt_ret);
  932. return RT_FAILED;
  933. }
  934. std::vector<NodePtr> variable_node_list;
  935. for (auto &it : hybrid_model_.variable_nodes_) {
  936. variable_node_list.emplace_back(it.second);
  937. GELOGD("[%s] added for trans var data", it.first.c_str());
  938. }
  939. GE_CHK_STATUS_RET(
  940. TransVarDataUtils::TransAllVarData(variable_node_list, runtime_param_.session_id, ctx, runtime_param_.graph_id),
  941. "TransAllVarData failed.");
  942. GELOGI("TransAllVarData success.");
  943. return SUCCESS;
  944. }
  945. Status HybridModelBuilder::CopyVarData() {
  946. GE_CHK_STATUS_RET(
  947. TransVarDataUtils::CopyVarData(ge_root_model_->GetRootGraph(), runtime_param_.session_id, hybrid_model_.device_id_),
  948. "CopyVarData failed.");
  949. GELOGI("CopyVarData success.");
  950. return SUCCESS;
  951. }
  952. Status HybridModelBuilder::LoadKnownShapedSubgraph(ComputeGraph &graph, NodeItem *parent_node_item) {
  953. GELOGD("Start to load known shaped subgraph [%s]", graph.GetName().c_str());
  954. auto graph_item = std::unique_ptr<GraphItem>(new (std::nothrow) GraphItem());
  955. GE_CHECK_NOTNULL(graph_item);
  956. graph_item->is_dynamic_ = false;
  957. auto subgraph_name = graph.GetName();
  958. auto wrapper_op_desc = MakeShared<OpDesc>(subgraph_name + "_partitioned_call", PARTITIONEDCALL);
  959. GE_CHECK_NOTNULL(wrapper_op_desc);
  960. for (auto &node : graph.GetDirectNode()) {
  961. GE_CHECK_NOTNULL(node);
  962. auto op_desc = node->GetOpDesc();
  963. GE_CHECK_NOTNULL(op_desc);
  964. const auto &op_type = node->GetType();
  965. if (op_type == DATA) {
  966. int32_t data_index = 0;
  967. if (!AttrUtils::GetInt(node->GetOpDesc(), ATTR_NAME_PARENT_NODE_INDEX, data_index)) {
  968. GELOGE(FAILED, "[%s] Failed to get attr [%s]", node->GetName().c_str(), ATTR_NAME_PARENT_NODE_INDEX.c_str());
  969. return FAILED;
  970. }
  971. (void)wrapper_op_desc->AddInputDesc(op_desc->GetInputDesc(0));
  972. graph_item->input_index_mapping_.emplace_back(data_index);
  973. } else if (op_type == NETOUTPUT) {
  974. int output_index = 0;
  975. for (const auto &output_desc : op_desc->GetAllInputsDescPtr()) {
  976. int32_t data_index = output_index++;
  977. if (!AttrUtils::GetInt(output_desc, ATTR_NAME_PARENT_NODE_INDEX, data_index)) {
  978. GELOGI("[%s] Failed to get attr [%s]", node->GetName().c_str(), ATTR_NAME_PARENT_NODE_INDEX.c_str());
  979. }
  980. GE_CHK_GRAPH_STATUS_RET(wrapper_op_desc->AddOutputDesc(*output_desc),
  981. "[%s] Failed to add output desc. output index = %d", graph.GetName().c_str(),
  982. output_index);
  983. graph_item->output_index_mapping_.emplace_back(data_index);
  984. }
  985. }
  986. }
  987. auto temp_graph = MakeShared<ComputeGraph>("temp");
  988. GE_CHECK_NOTNULL(temp_graph);
  989. auto wrapper_node = temp_graph->AddNode(wrapper_op_desc);
  990. GeModelPtr ge_model = subgraph_models_[subgraph_name];
  991. GE_CHECK_NOTNULL(ge_model);
  992. hybrid_model_.known_shape_sub_models_.emplace(wrapper_node, ge_model);
  993. NodeItem *node_item = nullptr;
  994. GE_CHK_STATUS_RET_NOLOG(GetOrCreateNodeItem(wrapper_node, &node_item));
  995. node_item->input_start = 0;
  996. node_item->output_start = 0;
  997. node_item->outputs.resize(node_item->num_outputs);
  998. graph_item->node_items_.emplace_back(node_item);
  999. graph_item->output_node_ = node_item;
  1000. graph_item->total_inputs_ = node_item->num_inputs;
  1001. graph_item->total_outputs_ = node_item->num_outputs;
  1002. GELOGD("NodeItem create for known shape subgraph [%s], NodeItem = %s", graph.GetName().c_str(),
  1003. node_item->DebugString().c_str());
  1004. GELOGD("Done parse known shape subgraph successfully. graph = [%s]", graph.GetName().c_str());
  1005. graph_item->SetName(graph.GetName());
  1006. GELOGD("Done loading known shape subgraph: [%s]", graph_item->GetName().c_str());
  1007. hybrid_model_.subgraph_items_.emplace(graph.GetName(), std::move(graph_item));
  1008. return SUCCESS;
  1009. }
  1010. Status HybridModelBuilder::LoadDynamicSubgraph(ComputeGraph &graph, bool is_root_graph) {
  1011. GELOGD("Start to load subgraph [%s]", graph.GetName().c_str());
  1012. // for known partitioned call, load all nodes
  1013. auto graph_item = std::unique_ptr<GraphItem>(new (std::nothrow) GraphItem());
  1014. GE_CHECK_NOTNULL(graph_item);
  1015. graph_item->is_dynamic_ = true;
  1016. graph_item->node_items_.reserve(graph.GetDirectNodesSize());
  1017. int input_start = 0;
  1018. int output_start = 0;
  1019. std::vector<NodeItem *> data_nodes;
  1020. for (auto &node : graph.GetDirectNode()) {
  1021. GE_CHECK_NOTNULL(node);
  1022. GE_CHECK_NOTNULL(node->GetOpDesc());
  1023. const auto &op_type = node->GetType();
  1024. NodeItem *node_item = nullptr;
  1025. GE_CHK_STATUS_RET_NOLOG(GetOrCreateNodeItem(node, &node_item));
  1026. GE_CHK_STATUS_RET_NOLOG(BuildNodeItem(node, *node_item));
  1027. GE_CHK_STATUS_RET_NOLOG(UpdateAnchorStatus(node));  // needed by FE to generate tasks
  1028. node_item->input_start = input_start;
  1029. node_item->output_start = output_start;
  1030. input_start += node_item->num_inputs;
  1031. output_start += node_item->num_outputs;
  1032. if (op_type == DATA_TYPE || op_type == AIPP_DATA_TYPE) {
  1033. data_nodes.emplace_back(node_item);
  1034. } else if (op_type == NETOUTPUT) {
  1035. graph_item->output_node_ = node_item;
  1036. GE_CHK_STATUS_RET_NOLOG(BuildOutputMapping(*graph_item, *node_item, is_root_graph));
  1037. }
  1038. graph_item->node_items_.emplace_back(node_item);
  1039. // parse var outputs
  1040. GE_CHK_STATUS_RET_NOLOG(ParseVarOutputs(*node_item));
  1041. GELOGD("NodeItem created: %s", node_item->DebugString().c_str());
  1042. }
  1043. graph_item->total_inputs_ = input_start;
  1044. graph_item->total_outputs_ = output_start;
  1045. GE_CHK_STATUS_RET_NOLOG(BuildInputMapping(*graph_item, data_nodes, is_root_graph));
  1046. if (is_root_graph) {
  1047. graph_item->SetName("Root-Graph");
  1048. GELOGD("Done loading dynamic subgraph: [%s]", graph_item->GetName().c_str());
  1049. hybrid_model_.root_graph_item_ = std::move(graph_item);
  1050. } else {
  1051. graph_item->SetName(graph.GetName());
  1052. GELOGD("Done loading dynamic subgraph: [%s]", graph_item->GetName().c_str());
  1053. hybrid_model_.subgraph_items_.emplace(graph.GetName(), std::move(graph_item));
  1054. }
  1055. return SUCCESS;
  1056. }
  1057. Status HybridModelBuilder::ParseVarOutputs(NodeItem &node_item) {
  1058. for (int i = 0; i < node_item.num_outputs; ++i) {
  1059. auto output_tensor_desc = node_item.op_desc->GetOutputDesc(i);
  1060. std::string var_name;
  1061. (void)AttrUtils::GetStr(output_tensor_desc, ASSIGN_VAR_NAME, var_name);
  1062. if (!var_name.empty()) {
  1063. auto var_node = hybrid_model_.GetVariableNode(var_name);
  1064. GE_CHECK_NOTNULL(var_node);
  1065. node_item.ref_outputs.emplace(i, var_node);
  1066. }
  1067. }
  1068. return SUCCESS;
  1069. }
  1070. Status HybridModelBuilder::BuildInputMapping(GraphItem &graph_item, vector<NodeItem *> &data_nodes,
  1071. bool is_root_graph) {
  1072. uint32_t data_op_index = 0;
  1073. for (auto &node_item : data_nodes) {
  1074. auto node = node_item->node;
  1075. int data_index = data_op_index;
  1076. if (is_root_graph) {
  1077. if (AttrUtils::GetInt(node->GetOpDesc(), ATTR_NAME_INDEX, data_index)) {
  1078. GELOGI("ge_train: get new index %u, old %u", data_index, data_op_index);
  1079. }
  1080. data_op_index++;
  1081. } else {
  1082. if (!AttrUtils::GetInt(node->GetOpDesc(), ATTR_NAME_PARENT_NODE_INDEX, data_index)) {
  1083. GELOGE(FAILED, "[%s] Failed to get attr [%s]", node->GetName().c_str(), ATTR_NAME_PARENT_NODE_INDEX.c_str());
  1084. return FAILED;
  1085. }
  1086. }
  1087. if (graph_item.input_nodes_.size() <= static_cast<size_t>(data_index)) {
  1088. graph_item.input_nodes_.resize(data_index + 1);
  1089. }
  1090. graph_item.input_nodes_[data_index] = node_item;
  1091. }
  1092. return SUCCESS;
  1093. }
  1094. } // namespace hybrid
  1095. } // namespace ge

The Graph Engine (GE) module is a submodule of MindSpore, implemented in C++. It sits between the front-end module (ME) and the underlying hardware and serves as the bridge between them. GE takes the graph delivered by ME as input, applies a series of deep graph optimizations, and finally outputs a graph that can run efficiently on the underlying hardware. GE performs optimizations tailored to the hardware architecture of the Ascend AI processor so as to fully exploit its compute power. During model training/inference, GE is invoked automatically and is transparent to the user. GE consists of two main parts, GE API and GE Core; the detailed architecture diagram is shown below.
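To make the GE API side concrete, here is a minimal, hypothetical sketch of how a client hands a graph to GE through the public session interface. It assumes the external headers ge/ge_api.h, graph/graph.h and graph/tensor.h and the GEInitialize/GEFinalize, ge::Session::AddGraph and ge::Session::RunGraph entry points; option keys, device setup and detailed error handling are omitted, so treat it as an illustration of the call flow rather than code from this file.

#include <map>
#include <string>
#include <vector>

#include "ge/ge_api.h"     // assumed public entry points: GEInitialize, GEFinalize, ge::Session
#include "graph/graph.h"   // ge::Graph
#include "graph/tensor.h"  // ge::Tensor

// Hypothetical driver: hands a graph built by the front end (ME) to GE and runs it.
int RunWithGe(const ge::Graph &graph, const std::vector<ge::Tensor> &inputs) {
  std::map<std::string, std::string> options;  // global/session options (left empty in this sketch)
  if (ge::GEInitialize(options) != ge::SUCCESS) {
    return -1;
  }
  int result = 0;
  {
    ge::Session session(options);              // GE API: one session per client
    const uint32_t graph_id = 1;
    std::vector<ge::Tensor> outputs;
    // AddGraph registers the graph; RunGraph triggers GE Core to optimize,
    // compile and execute it on the Ascend device.
    if (session.AddGraph(graph_id, graph) != ge::SUCCESS ||
        session.RunGraph(graph_id, inputs, outputs) != ge::SUCCESS) {
      result = -1;
    }
  }
  (void)ge::GEFinalize();
  return result;
}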