You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

cond_pass.cc 17 kB

5 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
5 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
5 years ago
4 years ago
4 years ago
4 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "graph/passes/cond_pass.h"
  17. #include "common/op/ge_op_utils.h"
  18. #include "graph/utils/graph_utils.h"
  19. #include "graph/utils/type_utils.h"
  20. #include "graph/utils/node_utils.h"
  21. namespace {
  22. const std::string kStringLength = "StringLength";
  23. }
  24. namespace ge {
/**
 * @brief Normalize the cond input of an If node (or the cond output of a
 *        While cond-subgraph) into an int32 scalar, inserting Size /
 *        StringLength / Cast nodes in front of it when needed, then
 *        rewriting the consumer's input desc to int32 scalar.
 * @param [in] node: any node; only If / While types are modified
 * @return SUCCESS / FAILED
 */
Status CondPass::Run(NodePtr &node) {
  ComputeGraphPtr graph = nullptr;
  OutDataAnchorPtr peer_out_anchor = nullptr;
  InDataAnchorPtr cond_in_anchor = nullptr;
  Status ret = GetCondInfo(node, graph, peer_out_anchor, cond_in_anchor);
  if (ret == NOT_CHANGED) {
    // Not an If / While node: nothing to do for this pass.
    return SUCCESS;
  } else if (ret != SUCCESS) {
    GELOGE(FAILED, "[Get][CondInfo] for node %s failed.", node->GetName().c_str());
    return FAILED;
  }
  /// cond
  /// 1. NonScalar: cond->Size(int32)->If / NetOutput(while)
  /// 2. String Scalar: cond->StringLength(int32)->If / NetOutput(while)
  /// 3. bool / float / double / uint8 / int16 / int8 / int64 Scalar: cond->Cast(2int32)->If / NetOutput(while)
  /// 4. Int32 Scalar: cond->If / NetOutput(while)
  OpDescPtr op_desc = cond_in_anchor->GetOwnerNode()->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  GELOGI("Handle cond for node %s.", op_desc->GetName().c_str());
  GeTensorDesc cond_tensor = op_desc->GetInputDesc(cond_in_anchor->GetIdx());
  // UNKNOWN_DIM_NUM in dim 0 presumably marks an unknown-rank tensor here
  // (TODO confirm against GE shape conventions); only the string case can be
  // rewritten statically, other dtypes are left untouched.
  if (cond_tensor.MutableShape().GetDim(0) == UNKNOWN_DIM_NUM) {
    GELOGI("Output tensor rank of Cond is unknown.");
    if (cond_tensor.GetDataType() == DT_STRING) {
      GE_CHK_STATUS_RET(HandleStringCond(graph, peer_out_anchor, cond_in_anchor),
                        "[Handle][StringCond] for op:%s failed.", op_desc->GetName().c_str())
    }
    // Note: returns early, so the int32-scalar desc rewrite below is skipped.
    return SUCCESS;
  }
  if (!cond_tensor.GetShape().IsScalar()) {
    // Case 1: non-scalar cond -> insert Size node.
    GE_CHK_STATUS_RET(HandleNonScalarCond(graph, peer_out_anchor, cond_in_anchor),
                      "[Handle][NonScalarCond] for op:%s failed.", op_desc->GetName().c_str())
  } else {
    switch (cond_tensor.GetDataType()) {
      case DT_STRING:
        // Case 2: string scalar -> insert StringLength node.
        GE_CHK_STATUS_RET(HandleStringCond(graph, peer_out_anchor, cond_in_anchor),
                          "[Handle][StringCond] for op:%s failed.", op_desc->GetName().c_str())
        break;
      case DT_BOOL:
      case DT_FLOAT:
      case DT_DOUBLE:
      case DT_UINT8:
      case DT_INT16:
      case DT_INT8:
      case DT_INT64:
        // Case 3: non-int32 scalar -> insert Cast-to-int32 node.
        GE_CHK_STATUS_RET(HandleScalarCond(graph, peer_out_anchor, cond_in_anchor, cond_tensor.GetDataType()),
                          "[Handle][ScalarCond] for op:%s failed.", op_desc->GetName().c_str())
        break;
      case DT_INT32:
        // Case 4: already an int32 scalar -> nothing to insert.
        break;
      default:
        REPORT_INNER_ERROR("E19999",
                           "data_type:%d of index:%d input tensor in op:%s(%s) check invalid",
                           cond_tensor.GetDataType(), cond_in_anchor->GetIdx(),
                           op_desc->GetName().c_str(), op_desc->GetType().c_str());
        GELOGE(FAILED, "[Check][Param] data_type:%d of index:%d input tensor in op:%s(%s) is invalid",
               cond_tensor.GetDataType(), cond_in_anchor->GetIdx(),
               op_desc->GetName().c_str(), op_desc->GetType().c_str());
        return FAILED;
    }
  }
  // Whatever node was inserted, the consumer now receives an int32 scalar:
  // rewrite its input desc accordingly.
  cond_tensor.SetDataType(DT_INT32);
  cond_tensor.SetOriginDataType(DT_INT32);
  cond_tensor.SetShape(GeShape());
  cond_tensor.SetOriginShape(GeShape());
  if (op_desc->UpdateInputDesc(cond_in_anchor->GetIdx(), cond_tensor) != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Update input desc of op:%s(%s) failed, index:%d",
                      op_desc->GetName().c_str(), op_desc->GetType().c_str(), cond_in_anchor->GetIdx());
    GELOGE(FAILED, "[Update][InputDesc] for op:%s(%s) failed, index:%d",
           op_desc->GetName().c_str(), op_desc->GetType().c_str(), cond_in_anchor->GetIdx());
    return FAILED;
  }
  return SUCCESS;
}
  98. ///
  99. /// @brief Get cond info for if / while
  100. /// @param [in] node: If / While op
  101. /// @param [out] graph: owner_graph of if node / while_cond subgraph
  102. /// @param [out] peer_out_anchor: peer_cond_anchor
  103. /// @param [out] cond_in_anchor: cond_input
  104. /// @return Status
  105. ///
  106. Status CondPass::GetCondInfo(const NodePtr &node, ComputeGraphPtr &graph, OutDataAnchorPtr &peer_out_anchor,
  107. InDataAnchorPtr &cond_in_anchor) {
  108. GE_CHECK_NOTNULL(node);
  109. std::string type = node->GetType();
  110. if (kIfOpTypes.count(type) != 0) {
  111. if (GetCondInfoForIf(node, graph, peer_out_anchor, cond_in_anchor) != SUCCESS) {
  112. GELOGE(FAILED, "[Get][CondInfo] for if node:%s failed.", node->GetName().c_str());
  113. return FAILED;
  114. }
  115. } else if (kWhileOpTypes.count(type) != 0) {
  116. if (GetCondInfoForWhile(node, graph, peer_out_anchor, cond_in_anchor) != SUCCESS) {
  117. GELOGE(FAILED, "[Get][CondInfo] for while node:%s failed.", node->GetName().c_str());
  118. return FAILED;
  119. }
  120. } else {
  121. GELOGD("no need cond_pass for node %s.", node->GetName().c_str());
  122. return NOT_CHANGED;
  123. }
  124. return SUCCESS;
  125. }
  126. ///
  127. /// @brief Get cond info for if node
  128. /// @param [in] node: If op
  129. /// @param [out] graph: owner_graph of if node
  130. /// @param [out] peer_out_anchor: peer_cond_anchor
  131. /// @param [out] cond_in_anchor: cond_input of if
  132. /// @return Status
  133. ///
  134. Status CondPass::GetCondInfoForIf(const NodePtr &node, ComputeGraphPtr &graph, OutDataAnchorPtr &peer_out_anchor,
  135. InDataAnchorPtr &cond_in_anchor) {
  136. GE_CHECK_NOTNULL(node);
  137. graph = node->GetOwnerComputeGraph();
  138. GE_CHECK_NOTNULL(graph);
  139. cond_in_anchor = node->GetInDataAnchor(IF_COND_INPUT);
  140. GE_CHECK_NOTNULL(cond_in_anchor);
  141. peer_out_anchor = cond_in_anchor->GetPeerOutAnchor();
  142. GE_CHECK_NOTNULL(peer_out_anchor);
  143. return SUCCESS;
  144. }
  145. ///
  146. /// @brief Get cond info for while node
  147. /// @param [in] node: While op
  148. /// @param [out] graph: while_cond subgraph
  149. /// @param [out] peer_out_anchor: peer_cond_anchor
  150. /// @param [out] cond_in_anchor: input of NetOutput in cond_graph
  151. /// @return Status
  152. ///
  153. Status CondPass::GetCondInfoForWhile(const NodePtr &node, ComputeGraphPtr &graph, OutDataAnchorPtr &peer_out_anchor,
  154. InDataAnchorPtr &cond_in_anchor) {
  155. GE_CHECK_NOTNULL(node);
  156. OpDescPtr op_desc = node->GetOpDesc();
  157. GE_CHECK_NOTNULL(op_desc);
  158. std::map<std::string, uint32_t> subgraph_names_to_index = op_desc->GetSubgraphNameIndexes();
  159. auto iter = subgraph_names_to_index.find(ATTR_NAME_WHILE_COND);
  160. if (iter == subgraph_names_to_index.end()) {
  161. REPORT_INNER_ERROR("E19999", "subgraph name:%s not exist in SubgraphNameIndexes map of op:%s(%s), "
  162. "check invalid", ATTR_NAME_WHILE_COND.c_str(),
  163. op_desc->GetName().c_str(), op_desc->GetType().c_str());
  164. GELOGE(FAILED, "subgraph name:%s not exist in SubgraphNameIndexes map of op:%s(%s)", ATTR_NAME_WHILE_COND.c_str(),
  165. op_desc->GetName().c_str(), op_desc->GetType().c_str());
  166. return FAILED;
  167. }
  168. std::string cond_graph_instance_name = op_desc->GetSubgraphInstanceName(iter->second);
  169. graph = GraphUtils::FindRootGraph(node->GetOwnerComputeGraph())->GetSubgraph(cond_graph_instance_name);
  170. GE_CHECK_NOTNULL(graph);
  171. NodePtr net_output_node = graph->FindFirstNodeMatchType(NETOUTPUT);
  172. GE_CHECK_NOTNULL(net_output_node);
  173. // cond_graph has and only has one output
  174. uint32_t output_num = net_output_node->GetAllInDataAnchorsSize();
  175. if (output_num != 1) {
  176. REPORT_INNER_ERROR("E19999", "Input data anchor num:%u of op:%s(%s) not equal to 1, check invalid",
  177. output_num, op_desc->GetName().c_str(), op_desc->GetType().c_str());
  178. GELOGE(FAILED, "[Check][Param] output size of cond_graph is invalid, expect 1 but %u exactly, while_node:%s.",
  179. output_num, node->GetName().c_str());
  180. return FAILED;
  181. }
  182. cond_in_anchor = net_output_node->GetInDataAnchor(0);
  183. GE_CHECK_NOTNULL(cond_in_anchor);
  184. peer_out_anchor = cond_in_anchor->GetPeerOutAnchor();
  185. GE_CHECK_NOTNULL(peer_out_anchor);
  186. return SUCCESS;
  187. }
  188. ///
  189. /// @brief Process Cond Op with non-scalar cond_input: cond->Size->If / NetOutput(while)
  190. /// @param [in] graph
  191. /// @param [in] peer_out_anchor: peer_cond_anchor
  192. /// @param [in] cond_in_anchor: cond_input
  193. /// @return Status
  194. ///
  195. Status CondPass::HandleNonScalarCond(const ComputeGraphPtr &graph, const OutDataAnchorPtr &peer_out_anchor,
  196. const InDataAnchorPtr &cond_in_anchor) {
  197. GELOGI("Handle cond with non-scalar cond-input.");
  198. return InsertNode(graph, peer_out_anchor, cond_in_anchor, SIZE);
  199. }
  200. ///
  201. /// @brief Process Cond Op with scalar-string cond_input: cond->StringLength(int32)->If / NetOutput(while)
  202. /// @param [in] graph
  203. /// @param [in] peer_out_anchor: peer_cond_anchor
  204. /// @param [in] cond_in_anchor: cond_input
  205. /// @return Status
  206. ///
  207. Status CondPass::HandleStringCond(const ComputeGraphPtr &graph, const OutDataAnchorPtr &peer_out_anchor,
  208. const InDataAnchorPtr &cond_in_anchor) {
  209. GELOGI("Handle cond with scalar-string cond-input.");
  210. return InsertNode(graph, peer_out_anchor, cond_in_anchor, kStringLength);
  211. }
  212. ///
  213. /// @brief Process Cond Op with scalar cond_input: cond->Cast(2int32)->If / NetOutput(while)
  214. /// @param [in] graph
  215. /// @param [in] peer_out_anchor: peer_cond_anchor
  216. /// @param [in] cond_in_anchor: cond_input
  217. /// @param [in] src_type
  218. /// @return Status
  219. ///
  220. Status CondPass::HandleScalarCond(const ComputeGraphPtr &graph, const OutDataAnchorPtr &peer_out_anchor,
  221. const InDataAnchorPtr &cond_in_anchor, DataType src_type) {
  222. GE_CHECK_NOTNULL(cond_in_anchor);
  223. GE_CHECK_NOTNULL(peer_out_anchor);
  224. GE_CHECK_NOTNULL(peer_out_anchor->GetOwnerNode()->GetOpDesc());
  225. GELOGI("Handle cond with scalar cond-input.");
  226. GeTensorDesc tensor = peer_out_anchor->GetOwnerNode()->GetOpDesc()->GetOutputDesc(peer_out_anchor->GetIdx());
  227. std::string cast_name = cond_in_anchor->GetOwnerNode()->GetName() + "_Cast";
  228. NodePtr cast_node = AddCastNode(graph, cast_name, tensor, src_type, DT_INT32);
  229. if (cast_node == nullptr) {
  230. GELOGE(FAILED, "[Add][CastNode] failed, name:%s.", cast_name.c_str());
  231. return FAILED;
  232. }
  233. if (GraphUtils::InsertNodeAfter(peer_out_anchor, { cond_in_anchor }, cast_node) != GRAPH_SUCCESS) {
  234. REPORT_CALL_ERROR("E19999", "Insert Cast node %s(%s) between %s(%s)->%s(%s) failed",
  235. cast_node->GetName().c_str(), cast_node->GetType().c_str(),
  236. peer_out_anchor->GetOwnerNode()->GetName().c_str(),
  237. peer_out_anchor->GetOwnerNode()->GetType().c_str(),
  238. cond_in_anchor->GetOwnerNode()->GetName().c_str(),
  239. cond_in_anchor->GetOwnerNode()->GetType().c_str());
  240. GELOGE(FAILED, "[Insert][CastNode] %s between %s->%s failed.",
  241. cast_node->GetName().c_str(), peer_out_anchor->GetOwnerNode()->GetName().c_str(),
  242. cond_in_anchor->GetOwnerNode()->GetName().c_str());
  243. return FAILED;
  244. }
  245. return SUCCESS;
  246. }
  247. ///
  248. /// @brief Insert node
  249. /// @param [in] graph
  250. /// @param [in] peer_out_anchor
  251. /// @param [in] in_data_anchor
  252. /// @param [in] type
  253. /// @return Status
  254. ///
///
/// @brief Insert a single-input / single-output node of the given type
///        (e.g. Size, StringLength) between peer_out_anchor and
///        in_data_anchor; the new node's output dtype is forced to int32.
/// @param [in] graph: graph the new node is added to
/// @param [in] peer_out_anchor: producer of the original value
/// @param [in] in_data_anchor: consumer of the value
/// @param [in] type: op type of the node to insert
/// @return Status
///
Status CondPass::InsertNode(const ComputeGraphPtr &graph, const OutDataAnchorPtr &peer_out_anchor,
                            const InDataAnchorPtr &in_data_anchor, const std::string &type) {
  GE_CHECK_NOTNULL(peer_out_anchor);
  GE_CHECK_NOTNULL(in_data_anchor);
  GELOGD("Begin to insert %s node.", type.c_str());
  GE_CHECK_NOTNULL(peer_out_anchor->GetOwnerNode()->GetOpDesc());
  GE_CHECK_NOTNULL(in_data_anchor->GetOwnerNode()->GetOpDesc());
  // Input desc mirrors the producer's output; output desc starts from the
  // consumer's input desc but is rewritten to int32 with the producer's shape.
  GeTensorDesc in_tensor = peer_out_anchor->GetOwnerNode()->GetOpDesc()->GetOutputDesc(peer_out_anchor->GetIdx());
  GeTensorDesc out_tensor = in_data_anchor->GetOwnerNode()->GetOpDesc()->GetInputDesc(in_data_anchor->GetIdx());
  out_tensor.SetDataType(DT_INT32);
  out_tensor.SetOriginDataType(DT_INT32);
  out_tensor.SetShape(in_tensor.GetShape());
  out_tensor.SetOriginShape(in_tensor.GetOriginShape());
  // New node is named "<consumer>_<type>" with fixed ports "x" -> "y".
  OpDescBuilder op_desc_builder(in_data_anchor->GetOwnerNode()->GetName() + "_" + type, type);
  OpDescPtr op_desc = op_desc_builder.AddInput("x", in_tensor).AddOutput("y", out_tensor).Build();
  if (op_desc == nullptr) {
    REPORT_CALL_ERROR("E19999", "Create op_desc:%s(%s) failed",
                      (in_data_anchor->GetOwnerNode()->GetName() + "_" + type).c_str(), type.c_str());
    GELOGE(FAILED, "[Create][OpDesc] %s(%s) failed.",
           (in_data_anchor->GetOwnerNode()->GetName() + "_" + type).c_str(), type.c_str());
    return FAILED;
  }
  NodePtr new_node = graph->AddNode(op_desc);
  if (new_node == nullptr) {
    REPORT_CALL_ERROR("E19999", "Add node:%s(%s) to graph:%s failed",
                      op_desc->GetName().c_str(), op_desc->GetType().c_str(), graph->GetName().c_str());
    GELOGE(FAILED, "[Add][Node] %s(%s) to graph:%s failed",
           op_desc->GetName().c_str(), op_desc->GetType().c_str(), graph->GetName().c_str());
    return FAILED;
  }
  // Queue the fresh node for re-pass so later pass iterations process it.
  AddRePassNode(new_node);
  if (GraphUtils::InsertNodeAfter(peer_out_anchor, { in_data_anchor }, new_node) != GRAPH_SUCCESS) {
    REPORT_CALL_ERROR("E19999", "Insert node %s(%s) between %s(%s)->%s(%s) failed",
                      new_node->GetName().c_str(), new_node->GetType().c_str(),
                      peer_out_anchor->GetOwnerNode()->GetName().c_str(),
                      peer_out_anchor->GetOwnerNode()->GetType().c_str(),
                      in_data_anchor->GetOwnerNode()->GetName().c_str(),
                      in_data_anchor->GetOwnerNode()->GetType().c_str());
    GELOGE(FAILED, "[Insert][Node] %s(%s) between %s(%s)->%s(%s) failed",
           new_node->GetName().c_str(), new_node->GetType().c_str(),
           peer_out_anchor->GetOwnerNode()->GetName().c_str(), peer_out_anchor->GetOwnerNode()->GetType().c_str(),
           in_data_anchor->GetOwnerNode()->GetName().c_str(), in_data_anchor->GetOwnerNode()->GetType().c_str());
    return FAILED;
  }
  return SUCCESS;
}
  301. ///
  302. /// @brief Add cast node
  303. /// @param [in] graph
  304. /// @param [in] name
  305. /// @param [in] tensor
  306. /// @param [in] src
  307. /// @param [in] dst
  308. /// @return NodePtr
  309. ///
  310. NodePtr CondPass::AddCastNode(const ComputeGraphPtr &graph, const std::string &name, const GeTensorDesc &tensor,
  311. DataType src, DataType dst) {
  312. GELOGI("Begin to create cast op: %s, from %d to %d", name.c_str(), src, dst);
  313. GeTensorDesc in_tensor = tensor;
  314. in_tensor.SetDataType(src);
  315. in_tensor.SetOriginDataType(src);
  316. GeTensorDesc out_tensor = tensor;
  317. out_tensor.SetDataType(dst);
  318. out_tensor.SetOriginDataType(dst);
  319. OpDescBuilder op_desc_builder(name, CAST);
  320. OpDescPtr cast_desc = op_desc_builder.AddInput("x", in_tensor).AddOutput("y", out_tensor).Build();
  321. if (cast_desc == nullptr) {
  322. REPORT_CALL_ERROR("E19999", "Create op_desc:%s(%s) failed", name.c_str(), CAST);
  323. GELOGE(FAILED, "[Create][OpDesc] failed, name:%s(%s).", name.c_str(), CAST);
  324. return nullptr;
  325. }
  326. if (!(AttrUtils::SetInt(cast_desc, CAST_ATTR_SRCT, src) &&
  327. AttrUtils::SetInt(cast_desc, CAST_ATTR_DSTT, dst) &&
  328. AttrUtils::SetInt(cast_desc, CAST_ATTR_DST_TYPE, dst) &&
  329. AttrUtils::SetBool(cast_desc, CAST_ATTR_TRUNCATE, false))) {
  330. REPORT_CALL_ERROR("E19999", "Set Attr:%s, %s, %s, %s to node:%s(%s) not all success",
  331. CAST_ATTR_SRCT.c_str(), CAST_ATTR_DSTT.c_str(),
  332. CAST_ATTR_DST_TYPE.c_str(), CAST_ATTR_TRUNCATE.c_str(),
  333. cast_desc->GetName().c_str(), cast_desc->GetType().c_str());
  334. GELOGE(FAILED, "[Set][Attr] %s, %s, %s, %s to node:%s(%s) not all success",
  335. CAST_ATTR_SRCT.c_str(), CAST_ATTR_DSTT.c_str(),
  336. CAST_ATTR_DST_TYPE.c_str(), CAST_ATTR_TRUNCATE.c_str(),
  337. cast_desc->GetName().c_str(), cast_desc->GetType().c_str());
  338. return nullptr;
  339. }
  340. NodePtr cast_node = graph->AddNode(cast_desc);
  341. if (cast_node == nullptr) {
  342. REPORT_CALL_ERROR("E19999", "Add node:%s(%s) to graph:%s failed",
  343. cast_desc->GetName().c_str(), cast_desc->GetType().c_str(), graph->GetName().c_str());
  344. GELOGE(FAILED, "[Add][Node] %s(%s) to graph:%s failed",
  345. cast_desc->GetName().c_str(), cast_desc->GetType().c_str(), graph->GetName().c_str());
  346. return nullptr;
  347. }
  348. AddRePassNode(cast_node);
  349. return cast_node;
  350. }
  351. } // namespace ge

图引擎模块(GE)是MindSpore的一个子模块,其代码由C++实现,位于前端模块ME和底层硬件之间,起到承接作用。图引擎模块以ME下发的图作为输入,然后进行一系列的深度图优化操作,最后输出一张可以在底层硬件上高效运行的图。GE针对昇腾AI处理器的硬件结构特点,做了特定的优化工作,以此来充分发挥出昇腾AI处理器的强大算力。在进行模型训练/推理时,GE会被自动调用而用户并不感知。GE主要由GE API和GE Core两部分组成,详细的架构图如下所示