
cond_pass.cc 13 kB

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/passes/cond_pass.h"
#include "common/op/ge_op_utils.h"
#include "graph/utils/graph_utils.h"
#include "graph/utils/type_utils.h"
#include "graph/utils/node_utils.h"

namespace {
const std::string kStringLength = "StringLength";
}

namespace ge {
Status CondPass::Run(NodePtr &node) {
  ComputeGraphPtr graph = nullptr;
  OutDataAnchorPtr cond_out_anchor = nullptr;
  InDataAnchorPtr cond_in_anchor = nullptr;
  Status ret = GetCondInfo(node, graph, cond_out_anchor, cond_in_anchor);
  if (ret == NOT_CHANGED) {
    return SUCCESS;
  } else if (ret != SUCCESS) {
    GELOGE(FAILED, "Get cond_info for node %s failed.", node->GetName().c_str());
    return FAILED;
  }

  /// cond
  /// 1. NonScalar: cond->Size(int32)->If / NetOutput(while)
  /// 2. String Scalar: cond->StringLength(int32)->If / NetOutput(while)
  /// 3. bool / float / double / uint8 / int16 / int8 / int64 Scalar: cond->Cast(2int32)->If / NetOutput(while)
  /// 4. Int32 Scalar: cond->If / NetOutput(while)
  OpDescPtr op_desc = cond_in_anchor->GetOwnerNode()->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  GELOGI("Handle cond for node %s.", op_desc->GetName().c_str());
  GeTensorDesc cond_tensor = op_desc->GetInputDesc(cond_in_anchor->GetIdx());
  if (cond_tensor.MutableShape().GetDim(0) == UNKNOWN_DIM_NUM) {
    GELOGI("Output tensor rank of Cond is unknown.");
    if (cond_tensor.GetDataType() == DT_STRING) {
      GE_CHK_STATUS_RET(HandleStringCond(graph, cond_out_anchor, cond_in_anchor), "HandleStringCond for %s failed.",
                        op_desc->GetName().c_str())
    }
    return SUCCESS;
  }
  if (!cond_tensor.GetShape().IsScalar()) {
    GE_CHK_STATUS_RET(HandleNonScalarCond(graph, cond_out_anchor, cond_in_anchor),
                      "HandleNonScalarCond for %s failed.", op_desc->GetName().c_str())
  } else {
    switch (cond_tensor.GetDataType()) {
      case DT_STRING:
        GE_CHK_STATUS_RET(HandleStringCond(graph, cond_out_anchor, cond_in_anchor), "HandleStringCond for %s failed.",
                          op_desc->GetName().c_str())
        break;
      case DT_BOOL:
      case DT_FLOAT:
      case DT_DOUBLE:
      case DT_UINT8:
      case DT_INT16:
      case DT_INT8:
      case DT_INT64:
        GE_CHK_STATUS_RET(HandleScalarCond(graph, cond_out_anchor, cond_in_anchor, cond_tensor.GetDataType()),
                          "HandleScalarCond for %s failed.", op_desc->GetName().c_str())
        break;
      case DT_INT32:
        break;
      default:
        GELOGE(FAILED, "Unsupported data type %s of cond input for node %s.",
               TypeUtils::DataTypeToSerialString(cond_tensor.GetDataType()).c_str(), op_desc->GetName().c_str());
        return FAILED;
    }
  }

  cond_tensor.SetDataType(DT_INT32);
  cond_tensor.SetOriginDataType(DT_INT32);
  cond_tensor.SetShape(GeShape());
  cond_tensor.SetOriginShape(GeShape());
  if (op_desc->UpdateInputDesc(cond_in_anchor->GetIdx(), cond_tensor) != GRAPH_SUCCESS) {
    GELOGE(FAILED, "UpdateInputDesc for node %s failed.", op_desc->GetName().c_str());
    return FAILED;
  }

  return SUCCESS;
}

///
/// @brief Get cond info for if / while
/// @param [in] node: If / While op
/// @param [out] graph: owner_graph of if node / while_cond subgraph
/// @param [out] cond_out_anchor: peer_cond_anchor
/// @param [out] cond_in_anchor: cond_input
/// @return Status
///
Status CondPass::GetCondInfo(const NodePtr &node, ComputeGraphPtr &graph, OutDataAnchorPtr &cond_out_anchor,
                             InDataAnchorPtr &cond_in_anchor) {
  GE_CHECK_NOTNULL(node);
  std::string type = node->GetType();
  if (kIfOpTypes.count(type) != 0) {
    if (GetCondInfoForIf(node, graph, cond_out_anchor, cond_in_anchor) != SUCCESS) {
      GELOGE(FAILED, "Get cond_info for if node failed.");
      return FAILED;
    }
  } else if (kWhileOpTypes.count(type) != 0) {
    if (GetCondInfoForWhile(node, graph, cond_out_anchor, cond_in_anchor) != SUCCESS) {
      GELOGE(FAILED, "Get cond_info for while node failed.");
      return FAILED;
    }
  } else {
    GELOGD("no need cond_pass for node %s.", node->GetName().c_str());
    return NOT_CHANGED;
  }

  return SUCCESS;
}

///
/// @brief Get cond info for if node
/// @param [in] node: If op
/// @param [out] graph: owner_graph of if node
/// @param [out] cond_out_anchor: peer_cond_anchor
/// @param [out] cond_in_anchor: cond_input of if
/// @return Status
///
Status CondPass::GetCondInfoForIf(const NodePtr &node, ComputeGraphPtr &graph, OutDataAnchorPtr &cond_out_anchor,
                                  InDataAnchorPtr &cond_in_anchor) {
  GE_CHECK_NOTNULL(node);
  graph = node->GetOwnerComputeGraph();
  GE_CHECK_NOTNULL(graph);
  cond_in_anchor = node->GetInDataAnchor(IF_COND_INPUT);
  GE_CHECK_NOTNULL(cond_in_anchor);
  cond_out_anchor = cond_in_anchor->GetPeerOutAnchor();
  GE_CHECK_NOTNULL(cond_out_anchor);
  return SUCCESS;
}

///
/// @brief Get cond info for while node
/// @param [in] node: While op
/// @param [out] graph: while_cond subgraph
/// @param [out] cond_out_anchor: peer_cond_anchor
/// @param [out] cond_in_anchor: input of NetOutput in cond_graph
/// @return Status
///
Status CondPass::GetCondInfoForWhile(const NodePtr &node, ComputeGraphPtr &graph, OutDataAnchorPtr &cond_out_anchor,
                                     InDataAnchorPtr &cond_in_anchor) {
  GE_CHECK_NOTNULL(node);
  OpDescPtr op_desc = node->GetOpDesc();
  GE_CHECK_NOTNULL(op_desc);
  std::map<std::string, uint32_t> subgraph_names_to_index = op_desc->GetSubgraphNameIndexes();
  auto iter = subgraph_names_to_index.find(ATTR_NAME_WHILE_COND);
  if (iter == subgraph_names_to_index.end()) {
    GELOGE(FAILED, "Get cond_graph index failed, while_node:%s.", node->GetName().c_str());
    return FAILED;
  }
  std::string cond_graph_instance_name = op_desc->GetSubgraphInstanceName(iter->second);
  graph = GraphUtils::FindRootGraph(node->GetOwnerComputeGraph())->GetSubgraph(cond_graph_instance_name);
  GE_CHECK_NOTNULL(graph);

  NodePtr net_output_node = graph->FindFirstNodeMatchType(NETOUTPUT);
  GE_CHECK_NOTNULL(net_output_node);
  // cond_graph has one and only one output
  uint32_t output_num = net_output_node->GetAllInDataAnchorsSize();
  if (output_num != 1) {
    GELOGE(FAILED, "Output size of cond_graph is invalid: expected 1 but got %u, while_node:%s.",
           output_num, node->GetName().c_str());
    return FAILED;
  }

  cond_in_anchor = net_output_node->GetInDataAnchor(0);
  GE_CHECK_NOTNULL(cond_in_anchor);
  cond_out_anchor = cond_in_anchor->GetPeerOutAnchor();
  GE_CHECK_NOTNULL(cond_out_anchor);
  return SUCCESS;
}

///
/// @brief Process Cond Op with non-scalar cond_input: cond->Size->If / NetOutput(while)
/// @param [in] graph
/// @param [in] out_anchor: peer_cond_anchor
/// @param [in] in_anchor: cond_input
/// @return Status
///
Status CondPass::HandleNonScalarCond(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_anchor,
                                     const InDataAnchorPtr &in_anchor) {
  GELOGI("Handle cond with non-scalar cond-input.");
  return InsertNode(graph, out_anchor, in_anchor, SIZE);
}

///
/// @brief Process Cond Op with scalar-string cond_input: cond->StringLength(int32)->If / NetOutput(while)
/// @param [in] graph
/// @param [in] out_anchor: peer_cond_anchor
/// @param [in] in_anchor: cond_input
/// @return Status
///
Status CondPass::HandleStringCond(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_anchor,
                                  const InDataAnchorPtr &in_anchor) {
  GELOGI("Handle cond with scalar-string cond-input.");
  return InsertNode(graph, out_anchor, in_anchor, kStringLength);
}

///
/// @brief Process Cond Op with scalar cond_input: cond->Cast(2int32)->If / NetOutput(while)
/// @param [in] graph
/// @param [in] out_anchor: peer_cond_anchor
/// @param [in] in_anchor: cond_input
/// @param [in] src_type
/// @return Status
///
Status CondPass::HandleScalarCond(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_anchor,
                                  const InDataAnchorPtr &in_anchor, DataType src_type) {
  GE_CHECK_NOTNULL(in_anchor);
  GE_CHECK_NOTNULL(out_anchor);
  GE_CHECK_NOTNULL(out_anchor->GetOwnerNode()->GetOpDesc());
  GELOGI("Handle cond with scalar cond-input.");

  GeTensorDesc tensor = out_anchor->GetOwnerNode()->GetOpDesc()->GetOutputDesc(out_anchor->GetIdx());
  std::string cast_name = in_anchor->GetOwnerNode()->GetName() + "_Cast";
  NodePtr cast_node = AddCastNode(graph, cast_name, tensor, src_type, DT_INT32);
  if (cast_node == nullptr) {
    GELOGE(FAILED, "Add Cast node failed, name:%s.", cast_name.c_str());
    return FAILED;
  }

  if (GraphUtils::InsertNodeAfter(out_anchor, { in_anchor }, cast_node) != GRAPH_SUCCESS) {
    GELOGE(FAILED, "Insert Cast node %s between %s->%s failed.",
           cast_node->GetName().c_str(), out_anchor->GetOwnerNode()->GetName().c_str(),
           in_anchor->GetOwnerNode()->GetName().c_str());
    return FAILED;
  }

  return SUCCESS;
}

///
/// @brief Insert node
/// @param [in] graph
/// @param [in] out_anchor
/// @param [in] in_anchor
/// @param [in] type
/// @return Status
///
Status CondPass::InsertNode(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_anchor,
                            const InDataAnchorPtr &in_anchor, const std::string &type) {
  GE_CHECK_NOTNULL(out_anchor);
  GE_CHECK_NOTNULL(in_anchor);
  GELOGD("Begin to insert %s node.", type.c_str());

  GE_CHECK_NOTNULL(out_anchor->GetOwnerNode()->GetOpDesc());
  GE_CHECK_NOTNULL(in_anchor->GetOwnerNode()->GetOpDesc());
  GeTensorDesc in_tensor = out_anchor->GetOwnerNode()->GetOpDesc()->GetOutputDesc(out_anchor->GetIdx());
  GeTensorDesc out_tensor = in_anchor->GetOwnerNode()->GetOpDesc()->GetInputDesc(out_anchor->GetIdx());
  out_tensor.SetDataType(DT_INT32);
  out_tensor.SetOriginDataType(DT_INT32);
  out_tensor.SetShape(in_tensor.GetShape());
  out_tensor.SetOriginShape(in_tensor.GetOriginShape());

  OpDescBuilder op_desc_builder(in_anchor->GetOwnerNode()->GetName() + "_" + type, type);
  OpDescPtr op_desc = op_desc_builder.AddInput("x", in_tensor).AddOutput("y", out_tensor).Build();
  if (op_desc == nullptr) {
    GELOGE(FAILED, "Create op_desc failed.");
    return FAILED;
  }
  NodePtr new_node = graph->AddNode(op_desc);
  if (new_node == nullptr) {
    GELOGE(FAILED, "Create %s node failed.", type.c_str());
    return FAILED;
  }
  AddRePassNode(new_node);

  if (GraphUtils::InsertNodeAfter(out_anchor, { in_anchor }, new_node) != GRAPH_SUCCESS) {
    GELOGE(FAILED, "Insert %s node %s between %s->%s failed.", type.c_str(),
           new_node->GetName().c_str(), out_anchor->GetOwnerNode()->GetName().c_str(),
           in_anchor->GetOwnerNode()->GetName().c_str());
    return FAILED;
  }

  return SUCCESS;
}

///
/// @brief Add cast node
/// @param [in] graph
/// @param [in] name
/// @param [in] tensor
/// @param [in] src
/// @param [in] dst
/// @return NodePtr
///
NodePtr CondPass::AddCastNode(const ComputeGraphPtr &graph, const std::string &name, const GeTensorDesc &tensor,
                              DataType src, DataType dst) {
  GELOGI("Begin to create cast op: %s, from %d to %d", name.c_str(), src, dst);

  GeTensorDesc in_tensor = tensor;
  in_tensor.SetDataType(src);
  in_tensor.SetOriginDataType(src);
  GeTensorDesc out_tensor = tensor;
  out_tensor.SetDataType(dst);
  out_tensor.SetOriginDataType(dst);
  OpDescBuilder op_desc_builder(name, CAST);
  OpDescPtr cast_desc = op_desc_builder.AddInput("x", in_tensor).AddOutput("y", out_tensor).Build();
  if (cast_desc == nullptr) {
    GELOGE(FAILED, "Create cast op_desc failed, name: %s.", name.c_str());
    return nullptr;
  }
  if (!(AttrUtils::SetInt(cast_desc, CAST_ATTR_SRCT, src) &&
        AttrUtils::SetInt(cast_desc, CAST_ATTR_DSTT, dst) &&
        AttrUtils::SetInt(cast_desc, CAST_ATTR_DST_TYPE, dst) &&
        AttrUtils::SetBool(cast_desc, CAST_ATTR_TRUNCATE, false))) {
    GELOGE(FAILED, "Set CAST_ATTR failed, node: %s.", name.c_str());
    return nullptr;
  }

  NodePtr cast_node = graph->AddNode(cast_desc);
  if (cast_node == nullptr) {
    GELOGE(FAILED, "Add cast node failed, name: %s.", name.c_str());
    return nullptr;
  }
  AddRePassNode(cast_node);
  return cast_node;
}
}  // namespace ge
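
CondPass is a node-level pass: a pass runner walks every node of a compute graph, hands each node to CondPass::Run, and the pass only rewires the cond input of If/While nodes (anything else reports NOT_CHANGED and is left untouched). The snippet below is a minimal sketch of how such a pass is typically driven; the GEPass and NamesToPass driver types and the helper name RunCondPassOnGraph are assumptions based on GraphEngine's graph/passes/base_pass.h, not part of this file.

#include "graph/passes/base_pass.h"
#include "graph/passes/cond_pass.h"

namespace ge {
// Hedged sketch: run CondPass over every node of a graph via the generic
// node-pass driver (assumed API, see base_pass.h in this repository).
Status RunCondPassOnGraph(ComputeGraphPtr &graph) {
  CondPass cond_pass;
  NamesToPass names_to_passes;
  names_to_passes.emplace_back("CondPass", &cond_pass);  // the name is used only for logging
  return GEPass(graph).Run(names_to_passes);             // calls CondPass::Run(node) for each node
}
}  // namespace ge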

图引擎模块(GE)是MindSpore的一个子模块,其代码由C++实现,位于前端模块ME和底层硬件之间,起到承接作用。图引擎模块以ME下发的图作为输入,然后进行一系列的深度图优化操作,最后输出一张可以在底层硬件上高效运行的图。GE针对昇腾AI处理器的硬件结构特点,做了特定的优化工作,以此来充分发挥出昇腾AI处理器的强大算力。在进行模型训练/推理时,GE会被自动调用而用户并不感知。GE主要由GE API和GE Core两部分组成,详细的架构图如下所示
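
As a rough illustration of that split, GE API is what a client program links against, while GE Core performs the graph optimizations (including node passes such as CondPass above) and execution. The following is a minimal, hedged sketch of the client-side flow through the public header ge/ge_api.h (GEInitialize, Session::AddGraph/RunGraph, GEFinalize); the option contents, the graph name, and the graph construction step are placeholders, not taken from this page.

#include <map>
#include <string>
#include <vector>
#include "ge/ge_api.h"  // public GE API header (GEInitialize, Session, GEFinalize)

int main() {
  std::map<std::string, std::string> options;        // global / session options (contents assumed)
  if (ge::GEInitialize(options) != ge::SUCCESS) {    // bring up GE Core
    return -1;
  }
  {
    ge::Session session(options);
    ge::Graph graph("demo_graph");                   // in practice the graph is issued by the frontend (ME)
    // ... build or deserialize the graph here ...
    const uint32_t graph_id = 0;
    std::vector<ge::Tensor> inputs;
    std::vector<ge::Tensor> outputs;
    if (session.AddGraph(graph_id, graph) == ge::SUCCESS) {
      // GE Core optimizes the graph (running passes such as CondPass) and then executes it.
      (void)session.RunGraph(graph_id, inputs, outputs);
    }
  }  // session destroyed before GE is finalized
  return ge::GEFinalize() == ge::SUCCESS ? 0 : -1;
}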