You can not select more than 25 topics Topics must start with a chinese character,a letter or number, can include dashes ('-') and can be up to 35 characters long.

switch_dead_branch_elimination.cc 8.1 kB

5 years ago
5 years ago
5 years ago
5 years ago
4 years ago
5 years ago
4 years ago
5 years ago
4 years ago
5 years ago
5 years ago
4 years ago
5 years ago
5 years ago
4 years ago
5 years ago
4 years ago
5 years ago
5 years ago
4 years ago
5 years ago
5 years ago
5 years ago
5 years ago
4 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "graph/passes/switch_dead_branch_elimination.h"
  17. #include <string>
  18. #include <vector>
  19. #include "framework/common/debug/ge_log.h"
  20. #include "graph/common/omg_util.h"
  21. #include "graph/passes/pass_utils.h"
  22. #include "graph/utils/graph_utils.h"
namespace ge {
namespace {
// Input slot 0 of a Switch node carries the data tensor to be routed.
const std::vector<int>::size_type kDataInputIndex = 0;
// Input slot 1 carries the boolean predicate that selects the branch.
const std::vector<int>::size_type kPredInputIndex = 1;
// Sentinel for the switch output->input remap passed to IsolateAndDeleteNode;
// presumably means "this output maps to no input" — confirm against that API.
const int kDefaultInputIndex = -1;
  28. bool ParsePred(const ConstGeTensorPtr &tensor) {
  29. if (tensor == nullptr) {
  30. REPORT_INNER_ERROR("E19999", "Param tensor is nullptr, check invalid");
  31. GELOGE(FAILED, "[Check][Param] parameter tensor is nullptr.");
  32. return false;
  33. }
  34. const uint8_t *data_ptr = tensor->GetData().data();
  35. auto type = tensor->GetTensorDesc().GetDataType();
  36. switch (type) {
  37. case DT_BOOL:
  38. return *reinterpret_cast<const bool *>(data_ptr);
  39. case DT_FLOAT:
  40. return static_cast<bool>(*reinterpret_cast<const float *>(data_ptr));
  41. case DT_DOUBLE:
  42. return static_cast<bool>(*reinterpret_cast<const double *>(data_ptr));
  43. case DT_INT8:
  44. case DT_UINT8:
  45. return static_cast<bool>(*data_ptr);
  46. case DT_FLOAT16:
  47. case DT_INT16:
  48. case DT_UINT16:
  49. return static_cast<bool>(*reinterpret_cast<const int16_t *>(data_ptr));
  50. case DT_INT32:
  51. case DT_UINT32:
  52. return static_cast<bool>(*reinterpret_cast<const int32_t *>(data_ptr));
  53. case DT_INT64:
  54. case DT_UINT64:
  55. return static_cast<bool>(*reinterpret_cast<const int64_t *>(data_ptr));
  56. default:
  57. return static_cast<bool>(*data_ptr);
  58. }
  59. }
  60. bool ParseOutDataAnchors(const NodePtr &node, const NodePtr &pred_node, OutDataAnchorPtr &active_out_data_anchor,
  61. OutDataAnchorPtr &inactive_out_data_anchor) {
  62. auto tensors = OpDescUtils::MutableWeights(pred_node);
  63. if (tensors.empty()) {
  64. REPORT_INNER_ERROR("E19999", "Node:%s(%s) has no weight, check invalid",
  65. pred_node->GetName().c_str(), pred_node->GetType().c_str());
  66. GELOGE(FAILED, "[Check][Param] Node:%s(%s) has no weight",
  67. pred_node->GetName().c_str(), pred_node->GetType().c_str());
  68. return false;
  69. }
  70. bool pred_value = ParsePred(tensors[0]);
  71. int inactive_output_index = pred_value ? 0 : 1;
  72. if (node == nullptr) {
  73. REPORT_INNER_ERROR("E19999", "Param node is nullptr, check invalid");
  74. GELOGE(FAILED, "[Check][Param] parameter node is nullptr.");
  75. return false;
  76. }
  77. GELOGI("[%s] Inactive output index = %d", node->GetName().c_str(), inactive_output_index);
  78. for (const auto &out_anchor : node->GetAllOutDataAnchors()) {
  79. if (out_anchor->GetIdx() == inactive_output_index) {
  80. inactive_out_data_anchor = out_anchor;
  81. } else {
  82. active_out_data_anchor = out_anchor;
  83. }
  84. }
  85. return true;
  86. }
  87. } // namespace
  88. Status SwitchDeadBranchElimination::DeleteSwitchNode(NodePtr &node, NodePtr &pred_node,
  89. const OutDataAnchorPtr &active_out_data_anchor) {
  90. if (node == nullptr || active_out_data_anchor == nullptr) {
  91. REPORT_INNER_ERROR("E19999", "Param node or active_out_data_anchor is nullptr, check invalid");
  92. GELOGE(FAILED, "[Check][Param] parameter node or active_out_data_anchor is nullptr.");
  93. return FAILED;
  94. }
  95. // If two nodes aren't in same graph, get node's direct in_node instead of pred_node.
  96. if (node->GetOwnerComputeGraph() != pred_node->GetOwnerComputeGraph()) {
  97. pred_node = PassUtils::GetInDataNode(node, kPredInputIndex);
  98. }
  99. // link pred's in control nodes to switch
  100. if (GraphUtils::CopyInCtrlEdges(pred_node, node) != GRAPH_SUCCESS) {
  101. REPORT_CALL_ERROR("E19999", "Copy in control edge from node:%s(%s) to node:%s(%s) failed",
  102. pred_node->GetName().c_str(), pred_node->GetType().c_str(),
  103. node->GetName().c_str(), node->GetType().c_str());
  104. GELOGE(FAILED, "[Copy][InCtrlEdges] from node:%s(%s) to node:%s(%s) failed",
  105. pred_node->GetName().c_str(), pred_node->GetType().c_str(),
  106. node->GetName().c_str(), node->GetType().c_str());
  107. return FAILED;
  108. }
  109. // Remove link between pred and switch
  110. auto in_pred_anchor = node->GetInDataAnchor(kPredInputIndex);
  111. GE_CHECK_NOTNULL(in_pred_anchor);
  112. in_pred_anchor->UnlinkAll();
  113. /// If condition Const is isolate, it will be delete with pruning
  114. /// Isolate Switch and delete it
  115. std::vector<int> switch_io_map = {kDefaultInputIndex, kDefaultInputIndex};
  116. size_t out_index = static_cast<size_t>(active_out_data_anchor->GetIdx());
  117. if (out_index >= switch_io_map.size()) {
  118. REPORT_INNER_ERROR("E19999", "Out index:%zu of node:%s(%s) >= %zu, check invalid", out_index,
  119. node->GetName().c_str(), node->GetType().c_str(), switch_io_map.size());
  120. GELOGE(FAILED, "[Check][Param] Out index:%zu of node:%s(%s) >= %zu.", out_index,
  121. node->GetName().c_str(), node->GetType().c_str(), switch_io_map.size());
  122. return FAILED;
  123. }
  124. switch_io_map[out_index] = kDataInputIndex;
  125. return IsolateAndDeleteNode(node, switch_io_map);
  126. }
  127. Status SwitchDeadBranchElimination::Run(NodePtr &node) {
  128. if (node == nullptr) {
  129. REPORT_INNER_ERROR("E19999", "Param node is nullptr, check invalid");
  130. GELOGE(PARAM_INVALID, "[Check][Param] Param [node] must not be null.");
  131. return PARAM_INVALID;
  132. }
  133. std::string op_type;
  134. GE_CHK_STATUS_RET(GetOriginalType(node, op_type),
  135. "[Get][OriginalType] of node:%s failed", node->GetName().c_str());
  136. if ((op_type != SWITCH) && (op_type != REFSWITCH)) {
  137. return SUCCESS;
  138. }
  139. if (node->GetOutAllNodes().empty()) {
  140. return SUCCESS;
  141. }
  142. auto pred_node = PassUtils::GetInNodeCrossSubgraphByIndex(node, kPredInputIndex);
  143. if (pred_node == nullptr) {
  144. GELOGD("[%s] Pred input is null.", node->GetName().c_str());
  145. return SUCCESS;
  146. }
  147. // Can be optimized when pred is constant
  148. if (!PassUtils::IsConstant(pred_node)) {
  149. GELOGD("[%s] Pred is not constant.", node->GetName().c_str());
  150. return SUCCESS;
  151. }
  152. auto input_node = PassUtils::GetInNodeCrossSubgraphByIndex(node, kDataInputIndex);
  153. if (input_node == nullptr) {
  154. GELOGD("[%s] Data input is null.", node->GetName().c_str());
  155. return SUCCESS;
  156. }
  157. // Get active & inactive output anchors by the value of pred
  158. OutDataAnchorPtr active_out_data_anchor = nullptr;
  159. OutDataAnchorPtr inactive_out_data_anchor = nullptr;
  160. if (!ParseOutDataAnchors(node, pred_node, active_out_data_anchor, inactive_out_data_anchor)) {
  161. return PARAM_INVALID;
  162. }
  163. if (inactive_out_data_anchor != nullptr) {
  164. GELOGI("[%s] To unlink inactive output %d", node->GetName().c_str(), inactive_out_data_anchor->GetIdx());
  165. std::vector<NodePtr> del_nodes;
  166. std::vector<NodePtr> end_nodes;
  167. Status ret = PassUtils::RemoveInactiveBranchToMerge(inactive_out_data_anchor, del_nodes, end_nodes);
  168. if (ret != SUCCESS) {
  169. REPORT_CALL_ERROR("E19999", "Remove inactive branch from node:%s(%s) to merge failed",
  170. node->GetName().c_str(), node->GetType().c_str());
  171. GELOGE(FAILED, "[Remove][InactiveBranch] from node:%s(%s) to merge failed",
  172. node->GetName().c_str(), node->GetType().c_str());
  173. return ret;
  174. }
  175. for (auto &end_node : end_nodes) {
  176. AddRePassNode(end_node);
  177. }
  178. for (const auto &delete_node : del_nodes) {
  179. AddNodeDeleted(delete_node);
  180. }
  181. }
  182. return DeleteSwitchNode(node, pred_node, active_out_data_anchor);
  183. }
  184. } // namespace ge

图引擎模块(GE)是MindSpore的一个子模块,其代码由C++实现,位于前端模块ME和底层硬件之间,起到承接作用。图引擎模块以ME下发的图作为输入,然后进行一系列的深度图优化操作,最后输出一张可以在底层硬件上高效运行的图。GE针对昇腾AI处理器的硬件结构特点,做了特定的优化工作,以此来充分发挥出昇腾AI处理器的强大算力。在进行模型训练/推理时,GE会被自动调用而用户并不感知。GE主要由GE API和GE Core两部分组成,详细的架构图如下所示