You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

reshape_recovery_pass.cc 5.9 kB

4 years ago
4 years ago
4 years ago
4 years ago
5 years ago
4 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "graph/passes/reshape_recovery_pass.h"
  17. #include "common/ge/ge_util.h"
  18. namespace ge {
  19. namespace {
  20. NodePtr CreateReshape(const ConstGeTensorDescPtr &src, const ConstGeTensorDescPtr &dst, const ComputeGraphPtr &graph) {
  21. static std::atomic_long reshape_num(0);
  22. auto next_num = reshape_num.fetch_add(1);
  23. auto reshape = MakeShared<OpDesc>("Reshape_ReshapeRecoveryPass_" + std::to_string(next_num), RESHAPE);
  24. if (reshape == nullptr) {
  25. REPORT_CALL_ERROR("E19999", "New OpDesc failed");
  26. GELOGE(FAILED, "[New][OpDesc] failed");
  27. return nullptr;
  28. }
  29. auto ret = reshape->AddInputDesc("x", *src);
  30. if (ret != GRAPH_SUCCESS) {
  31. REPORT_CALL_ERROR("E19999", "Add input desc to op:%s(%s) failed, name:x",
  32. reshape->GetName().c_str(), reshape->GetType().c_str());
  33. GELOGE(FAILED, "[Add][InputDesc] to op:%s(%s) failed, name:x",
  34. reshape->GetName().c_str(), reshape->GetType().c_str());
  35. return nullptr;
  36. }
  37. ret = reshape->AddInputDesc("shape", GeTensorDesc(GeShape(), Format(), DT_INT32));
  38. if (ret != GRAPH_SUCCESS) {
  39. REPORT_CALL_ERROR("E19999", "Add input desc to op:%s(%s) failed, name:shape",
  40. reshape->GetName().c_str(), reshape->GetType().c_str());
  41. GELOGE(FAILED, "[Add][InputDesc] to op:%s(%s) failed, name:shape",
  42. reshape->GetName().c_str(), reshape->GetType().c_str());
  43. return nullptr;
  44. }
  45. ret = reshape->AddOutputDesc("y", *dst);
  46. if (ret != GRAPH_SUCCESS) {
  47. REPORT_CALL_ERROR("E19999", "Add output desc to op:%s(%s) failed, name:y",
  48. reshape->GetName().c_str(), reshape->GetType().c_str());
  49. GELOGE(FAILED, "[Add][OutputDesc] to op:%s(%s) failed, name:y",
  50. reshape->GetName().c_str(), reshape->GetType().c_str());
  51. return nullptr;
  52. }
  53. return graph->AddNode(reshape);
  54. }
  55. Status InsertReshapeIfNeed(const NodePtr &node) {
  56. GE_CHECK_NOTNULL(node);
  57. GE_CHECK_NOTNULL(node->GetOpDesc());
  58. for (auto src_anchor : node->GetAllOutDataAnchors()) {
  59. auto src_tensor = node->GetOpDesc()->GetOutputDescPtr(src_anchor->GetIdx());
  60. GE_CHECK_NOTNULL(src_tensor);
  61. for (auto dst_anchor : src_anchor->GetPeerInDataAnchors()) {
  62. auto dst_node = dst_anchor->GetOwnerNode();
  63. GELOGD("Try insert reshape between %s[%d] and %s[%d] to keep the shape continues",
  64. node->GetName().c_str(), src_anchor->GetIdx(), dst_node->GetName().c_str(), dst_anchor->GetIdx());
  65. GE_CHECK_NOTNULL(dst_node);
  66. GE_CHECK_NOTNULL(dst_node->GetOpDesc());
  67. auto dst_tensor = dst_node->GetOpDesc()->MutableInputDesc(dst_anchor->GetIdx());
  68. GE_CHECK_NOTNULL(dst_tensor);
  69. bool is_dynamic = false;
  70. const auto &src_tensor_dims = src_tensor->GetShape().GetDims();
  71. const auto &dst_tensor_dims = dst_tensor->GetShape().GetDims();
  72. if ((std::any_of(src_tensor_dims.begin(), src_tensor_dims.end(), [](int64_t val) { return val < 0 ; }))
  73. || (std::any_of(dst_tensor_dims.begin(), dst_tensor_dims.end(), [](int64_t val) { return val < 0; }))) {
  74. GELOGD("No need to insert reshape node between %s nad %s.", node->GetName().c_str(),
  75. dst_node->GetName().c_str());
  76. is_dynamic = true;
  77. }
  78. if (dst_node->GetType() == NETOUTPUT && is_dynamic) {
  79. // NetOutput shape must be continuous when dynamic shape.
  80. // Otherwise, there may be an error waiting for the shape refresh to time out during execution.
  81. dst_tensor->SetShape(src_tensor->GetShape());
  82. continue;
  83. }
  84. bool is_need_insert_reshape = src_tensor_dims != dst_tensor_dims &&
  85. !is_dynamic;
  86. if (is_need_insert_reshape) {
  87. auto reshape = CreateReshape(src_tensor, dst_tensor, node->GetOwnerComputeGraph());
  88. GE_CHECK_NOTNULL(reshape);
  89. auto ret = GraphUtils::InsertNodeBetweenDataAnchors(src_anchor, dst_anchor, reshape);
  90. if (ret != GRAPH_SUCCESS) {
  91. REPORT_CALL_ERROR("E19999",
  92. "Insert node:%s(%s) between node:%s(%s)(out_index:%d) and node:%s(%s)(out_index:%d) failed",
  93. reshape->GetName().c_str(), reshape->GetType().c_str(),
  94. node->GetName().c_str(), node->GetType().c_str(), src_anchor->GetIdx(),
  95. dst_node->GetName().c_str(), dst_node->GetType().c_str(), dst_anchor->GetIdx());
  96. GELOGE(INTERNAL_ERROR,
  97. "[Insert][Node] %s(%s) between node:%s(%s)(out_index:%d) and node:%s(%s)(out_index:%d) failed",
  98. reshape->GetName().c_str(), reshape->GetType().c_str(),
  99. node->GetName().c_str(), node->GetType().c_str(), src_anchor->GetIdx(),
  100. dst_node->GetName().c_str(), dst_node->GetType().c_str(), dst_anchor->GetIdx());
  101. return INTERNAL_ERROR;
  102. }
  103. GELOGI("Insert reshape between %s and %s to keep the shape continues",
  104. node->GetName().c_str(), dst_node->GetName().c_str());
  105. }
  106. }
  107. }
  108. return SUCCESS;
  109. }
  110. } // namespace
  111. Status ReshapeRecoveryPass::Run(ComputeGraphPtr graph) {
  112. for (const auto &node : graph->GetDirectNode()) {
  113. auto ret = InsertReshapeIfNeed(node);
  114. if (ret != SUCCESS) {
  115. return ret;
  116. }
  117. }
  118. return SUCCESS;
  119. }
  120. } // namespace ge

图引擎模块(GE)是MindSpore的一个子模块,其代码由C++实现,位于前端模块ME和底层硬件之间,起到承接作用。图引擎模块以ME下发的图作为输入,然后进行一系列的深度图优化操作,最后输出一张可以在底层硬件上高效运行的图。GE针对昇腾AI处理器的硬件结构特点,做了特定的优化工作,以此来充分发挥出昇腾AI处理器的强大算力。在进行模型训练/推理时,GE会被自动调用而用户并不感知。GE主要由GE API和GE Core两部分组成,详细的架构图如下所示