
block_mem_assigner.cc 64 kB

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/build/memory/block_mem_assigner.h"
#include <algorithm>
#include <sstream>

#include "external/ge/ge_api_types.h"
#include "framework/common/debug/ge_log.h"
#include "graph/anchor.h"
#include "graph/buffer.h"
#include "graph/ge_attr_value.h"
#include "graph/ge_context.h"
#include "graph/node.h"
#include "graph/utils/graph_utils.h"
#include "graph/utils/node_utils.h"
#include "graph/utils/op_desc_utils.h"
#include "graph/utils/tensor_utils.h"
#include "graph/debug/ge_attr_define.h"
#include "graph/common/local_context.h"
#include "graph/optimize/common/params.h"
#include "omg/omg_inner_types.h"
#include "runtime/mem.h"

using std::list;
using std::map;
using std::pair;
using std::set;
using std::string;
using std::stringstream;
using std::unordered_map;
using std::unordered_set;
using std::vector;

namespace {
const char *const kAttrNameWorkspaceReuseFlag = "workspace_reuse_flag";
const char *const kL2FusionDynamicConvergeOp = "l2fusion_dynamic_converge_op";
const char *const kOpNoReuseMem = "no_reuse_mem_flag";
const char *const OP_NO_REUSE_MEM = "OP_NO_REUSE_MEM";
const int kReuseMaxCount = 10;
const int kReuseMaxOpNum = 10;
const int kReuseMaxCharNum = 2000;
}  // namespace

namespace ge {
void AlignMemOffset(size_t &mem_align_size) {
  if (mem_align_size == 0) {  // size_t is unsigned, so only zero needs the early return
    return;
  }
  mem_align_size = (mem_align_size + MEM_ALIGN_SIZE - 1) / MEM_ALIGN_SIZE * MEM_ALIGN_SIZE;
}
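// A minimal worked example of the round-up, assuming MEM_ALIGN_SIZE is 512 (it is defined
// elsewhere in GE): (1 + 511) / 512 * 512 = 512, (512 + 511) / 512 * 512 = 512, and
// (513 + 511) / 512 * 512 = 1024, i.e. sizes are rounded up to the next multiple.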
void MemoryBlock::SetHeadOffset(size_t offset) {
  head_offset_ = offset;
  size_t child_offset = head_offset_;
  for (auto block : child_blocks_) {
    if (block != nullptr) {
      block->SetHeadOffset(child_offset);
      child_offset += block->Size();
    }
  }
}

void MemoryBlock::SetTailOffset(size_t offset) {
  tail_offset_ = offset;
  size_t child_offset = head_offset_;
  for (auto block : child_blocks_) {
    if (block != nullptr) {
      child_offset += block->Size();
      block->SetTailOffset(child_offset - 1);
    }
  }
}

void MemoryBlock::Resize() {
  size_t child_block_size = 0;
  for (auto block : child_blocks_) {
    if (block != nullptr) {
      block->Resize();
      child_block_size += block->Size();
    }
  }
  auto iter = std::max_element(real_size_list_.begin(), real_size_list_.end());
  if (iter == real_size_list_.end()) {
    GELOGW("real_size_list_ is empty");
    return;
  } else {
    size_t block_size = (child_block_size > *iter) ? child_block_size : *iter;
    if ((block_size > 0) && (block_size % MEM_ALIGN_SIZE != 0)) {
      AlignMemOffset(block_size);
    }
    block_size_ = block_size;
    if (last_continuous_block_) {
      block_size_ += MEM_ALIGN_SIZE;
    }
  }
}
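// Sizing rule in short: a block must hold both the largest tensor ever placed in it (the max
// of real_size_list_) and the sum of all merged child blocks, so the final size is
// max(child_block_size, max(real_size_list_)) aligned up. The extra MEM_ALIGN_SIZE pad for
// last_continuous_block_ appears to give HCCL-style continuous allocations an aligned tail
// (see the "hccl task" comment in ApplyContinuousMemory below).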
size_t MemoryBlock::AlignSize() const {
  size_t align_block_size = 0;
  auto iter = std::max_element(real_size_list_.begin(), real_size_list_.end());
  if (iter == real_size_list_.end()) {
    GELOGW("real_size_list_ is empty");
  } else {
    align_block_size = *iter;
    if ((align_block_size > 0) && (align_block_size % MEM_ALIGN_SIZE != 0)) {
      AlignMemOffset(align_block_size);
    }
  }
  return align_block_size;
}

bool MemoryBlock::IsSameLabel(std::string &first_batch_label) {
  if (node_type_index_list_.empty()) {
    return false;
  }
  auto node_op_desc = node_type_index_list_[0].node->GetOpDesc();
  if (node_op_desc == nullptr) {
    return false;
  }
  // Not every op has ATTR_NAME_BATCH_LABEL, so the return value need not be checked;
  // only the out parameter matters.
  (void)ge::AttrUtils::GetStr(node_op_desc, ATTR_NAME_BATCH_LABEL, first_batch_label);
  if (first_batch_label.empty()) {
    return false;
  }
  bool all_same_label = true;
  for (size_t index = 1; index < node_type_index_list_.size(); ++index) {
    if (node_type_index_list_[index].node == nullptr) {
      continue;
    }
    std::string batch_label;
    auto index_op_desc = node_type_index_list_[index].node->GetOpDesc();
    GE_IF_BOOL_EXEC(index_op_desc == nullptr, continue);
    (void)ge::AttrUtils::GetStr(index_op_desc, ATTR_NAME_BATCH_LABEL, batch_label);
    if (first_batch_label != batch_label) {
      all_same_label = false;
      break;
    }
  }
  return all_same_label;
}
bool CanNotLifeReuse(MemoryBlock *block) {
  if ((block == nullptr) || !block->reuse_mem_ || block->deleted_block_) {
    return true;
  }
  return false;
}

void MemoryBlock::AddContinuousLifeReuseBlock(MemoryBlock *block, DependStreamLife &total_node_depend_stream_life) {
  // Continuous memory case: only the block whose real_size is the maximum can be reused,
  // and a block can hold at most one continuous memory allocation.
  auto it_block = std::max_element(std::begin(block->NoAlignSizeList()), std::end(block->NoAlignSizeList()));
  auto it_this = std::max_element(std::begin(NoAlignSizeList()), std::end(NoAlignSizeList()));
  if (it_block != std::end(block->NoAlignSizeList()) && it_this != std::end(NoAlignSizeList())) {
    if ((continuous_block_ && block->continuous_block_) || (continuous_block_ && (*it_this < *it_block)) ||
        (block->continuous_block_ && (*it_this > *it_block))) {
      GELOGD("Conflict current block size:%zu continuous:%d, reuse block max size:%zu continuous:%d", *it_this,
             continuous_block_, *it_block, block->continuous_block_);
      return;
    }
  }
  MemoryBlock *parent = nullptr;
  MemoryBlock *child = nullptr;
  // merge the small block into the large block
  if (block->GetDependLifeBegin(stream_id_, total_node_depend_stream_life) > GetLifeEnd()) {
    if ((block->child_offset_ + AlignSize()) <= *it_block) {
      parent = block;
      child = this;
    }
  }
  if ((parent != nullptr) && (child != nullptr) && child->child_blocks_.empty()) {
    parent->child_blocks_.emplace_back(child);
    parent->child_offset_ += child->AlignSize();
    child->deleted_block_ = true;
    GELOGI(
        "Add continuous block[%p size:%zu, stream id:%ld life time[begin:%zu, end:%zu]] to"
        " block[%p size:%zu, stream id:%ld, life time[begin:%zu, end:%zu]]",
        child, child->block_size_, child->stream_id_, child->GetLifeBegin(), child->GetLifeEnd(), parent,
        parent->block_size_, parent->stream_id_, parent->GetLifeBegin(), parent->GetLifeEnd());
  }
}
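// Merge precondition in brief: the candidate block's earliest dependent life on this stream
// must start strictly after this block's life ends, so the two allocations never overlap in
// time, and the child's aligned size must still fit behind the parent's current child_offset_.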
void MemoryBlock::AddLifeReuseBlock(MemoryBlock *block, DependStreamLife &total_node_depend_stream_life) {
  if (CanNotLifeReuse(this) || CanNotLifeReuse(block)) {
    return;
  }
  if (block->continuous_block_) {
    AddContinuousLifeReuseBlock(block, total_node_depend_stream_life);
    return;
  }
  MemoryBlock *parent = nullptr;
  MemoryBlock *child = nullptr;
  // merge the small block into the large block
  if (block->GetDependLifeBegin(stream_id_, total_node_depend_stream_life) > GetLifeEnd()) {
    if ((child_offset_ + block->AlignSize()) <= AlignSize()) {
      parent = this;
      child = block;
    } else if ((block->child_offset_ + AlignSize()) <= block->AlignSize()) {
      parent = block;
      child = this;
    }
  }
  if ((parent != nullptr) && (child != nullptr) && child->child_blocks_.empty()) {
    parent->child_blocks_.emplace_back(child);
    parent->child_offset_ += child->AlignSize();
    child->deleted_block_ = true;
    GELOGI(
        "Add block[%p size:%zu, stream id:%ld life time[begin:%zu, end:%zu]] to"
        " block[%p size:%zu, stream id:%ld, life time[begin:%zu, end:%zu]]",
        child, child->block_size_, child->stream_id_, child->GetLifeBegin(), child->GetLifeEnd(), parent,
        parent->block_size_, parent->stream_id_, parent->GetLifeBegin(), parent->GetLifeEnd());
  }
}
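// Illustrative scenario (sizes and ids are made up): if block A (1024 bytes) dies at node id
// 10 and block B (256 bytes) is first needed at node id 12 on a stream whose dependencies
// cover A's stream, B is folded into A as a child block; A keeps its offsets and B is marked
// deleted_block_ so it is not laid out a second time.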
size_t MemoryBlock::GetLifeBegin() {
  size_t life_time = 0;
  if (!node_type_index_list_.empty()) {
    if (node_type_index_list_.front().node != nullptr) {
      auto node_op_desc = node_type_index_list_.front().node->GetOpDesc();
      if (node_op_desc != nullptr) {
        life_time = node_op_desc->GetId();
      }
    }
  }
  return life_time;
}

/// |-stream 1-| |-stream 2-|
/// |--block1--| |--block---|
/// |--block2--| |--block---|
/// |--block3--|\ |--block---|
/// |--block---| \ |--block---|
/// |--block---|  \|--block---|
/// |--block---|   |--block7--|
/// |--block---| |--block---|
/// If the life begin of block7's first node's input node is greater than block2's life end,
/// block7 can reuse block1~block2.
size_t MemoryBlock::GetDependLifeBegin(int64_t stream_id, DependStreamLife &total_node_depend_stream_life) {
  AddDependLifeBegin(total_node_depend_stream_life);
  auto it = depend_stream_life_.find(stream_id);
  if (it == depend_stream_life_.end()) {
    return 0;
  }
  return it->second;
}
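// depend_stream_life_ maps stream id -> the latest node id on that stream that this block's
// first node (transitively) depends on; a return of 0 means no dependency on that stream is
// known, so no cross-stream life-time reuse is attempted against it.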
void AddDependLife(const ge::NodePtr &org_node, const ge::NodePtr &node, int64_t stream_id,
                   std::map<int64_t, size_t> &depend_stream_life, DependStreamLife &total_node_depend_stream_life) {
  GE_CHECK_NOTNULL_EXEC(node, return );
  auto node_desc = node->GetOpDesc();
  GE_CHECK_NOTNULL_EXEC(node_desc, return );
  auto node_id = node_desc->GetId();
  auto stream_life = total_node_depend_stream_life.find(node_id);
  if (stream_life != total_node_depend_stream_life.end()) {
    for (auto &it : stream_life->second) {
      if (depend_stream_life.find(it.first) == depend_stream_life.end()) {
        depend_stream_life[it.first] = it.second;
      }
    }
    return;
  }
  for (const auto &in_anchor : node->GetAllInAnchors()) {
    GE_CHECK_NOTNULL_EXEC(in_anchor, continue);
    for (auto peer_out_anchor : in_anchor->GetPeerAnchors()) {
      GE_CHECK_NOTNULL_EXEC(peer_out_anchor, continue);
      auto peer_node = peer_out_anchor->GetOwnerNode();
      GE_CHECK_NOTNULL_EXEC(peer_node, continue);
      auto peer_node_desc = peer_node->GetOpDesc();
      GE_CHECK_NOTNULL_EXEC(peer_node_desc, continue);
      auto peer_node_stream_id = peer_node_desc->GetStreamId();
      if (peer_node_stream_id < 0) {
        continue;
      }
      size_t peer_node_life_time = peer_node_desc->GetId();
      auto it = depend_stream_life.find(peer_node_stream_id);
      if (it == depend_stream_life.end() || peer_node_life_time > it->second) {
        depend_stream_life[peer_node_stream_id] = peer_node_life_time;
        if (peer_node_stream_id != stream_id) {
          GELOGI("Node:%s stream id:%ld depend node:%s stream id:%ld index[%d] life time[%zu].",
                 org_node->GetName().c_str(), stream_id, peer_node_desc->GetName().c_str(), peer_node_stream_id,
                 peer_out_anchor->GetIdx(), peer_node_life_time);
        }
        AddDependLife(org_node, peer_node, stream_id, depend_stream_life, total_node_depend_stream_life);
      }
    }
  }
  // Cache the result on this node so the next query does not have to recompute it.
  for (auto &it : depend_stream_life) {
    if (total_node_depend_stream_life[node_id].find(it.first) == total_node_depend_stream_life[node_id].end()) {
      total_node_depend_stream_life[node_id][it.first] = it.second;
    }
  }
}
void MemoryBlock::AddDependLifeBegin(DependStreamLife &total_node_depend_stream_life) {
  if (!depend_stream_life_.empty()) {
    return;
  }
  if (!node_type_index_list_.empty()) {
    auto node = node_type_index_list_.front().node;
    if (node != nullptr) {
      AddDependLife(node, node, stream_id_, depend_stream_life_, total_node_depend_stream_life);
    }
  }
  depend_stream_life_[stream_id_] = GetLifeBegin();
}

size_t MemoryBlock::GetLifeEnd() {
  if (!node_type_index_list_.empty()) {
    return node_type_index_list_.back().life_time_end;
  }
  return kMaxLifeTime;
}

void MemoryBlock::SetLifeTimeEnd(size_t time) {
  if (!node_type_index_list_.empty()) {
    node_type_index_list_.back().life_time_end = time;
  }
}

void SetLastUsedInputMemAttr(NodePtr &node, int input_index) {
  if (node == nullptr) {
    return;
  }
  auto node_op_desc = node->GetOpDesc();
  if (node_op_desc != nullptr) {
    auto input_desc = node_op_desc->GetInputDesc(input_index);
    if (!ge::AttrUtils::SetInt(input_desc, ATTR_NAME_IS_END_OF_INPUTMEM_LIFECYCLE, true)) {
      GELOGW("Set %s input[%d] ATTR_NAME_IS_END_OF_INPUTMEM_LIFECYCLE to true failed.",
             node_op_desc->GetName().c_str(), input_index);
      return;
    }
    GELOGD("Set %s input[%d] ATTR_NAME_IS_END_OF_INPUTMEM_LIFECYCLE to true success.",
           node_op_desc->GetName().c_str(), input_index);
    if (node_op_desc->UpdateInputDesc(input_index, input_desc) != GRAPH_SUCCESS) {
      GELOGW("Update %s input[%d] desc failed.", node_op_desc->GetName().c_str(), input_index);
    }
  }
}

Status GetNoAlignSize(const ge::OpDesc &desc, uint32_t index, size_t &size) {
  // calculate the tensor's real size
  auto output_op_desc = desc.GetOutputDescPtr(index);
  if (output_op_desc == nullptr) {
    GELOGI("GetNoAlignSize failed. OpName: %s, OpType: %s, index: %u", desc.GetName().c_str(), desc.GetType().c_str(),
           index);
    return FAILED;
  }
  int64_t tensor_size = 0;
  GeShape shape = output_op_desc->GetShape();
  Format format = output_op_desc->GetFormat();
  DataType data_type = output_op_desc->GetDataType();
  graphStatus graph_status = TensorUtils::CalcTensorMemSize(shape, format, data_type, tensor_size);
  if (graph_status != GRAPH_SUCCESS) {
    GELOGE(graph_status, "CalcTensorMemSize failed!");
    return FAILED;
  }
  size = static_cast<size_t>(tensor_size);
  return SUCCESS;
}
string ToString(ge::NodeTypeIndex &x) {
  stringstream ss;
  ss << "[" << x.node->GetName() << "(" << x.node->GetType() << "), ";
  if (x.mem_type == kOutput) {
    ss << "Output, ";
  } else {
    ss << "Workspace, ";
  }
  ss << x.index << "]";
  return ss.str();
}

string MemoryBlock::String() {
  stringstream ss;
  ss << "Block size: " << Size() << " from " << HeadOffset() << " to " << TailOffset() << " ";
  ss << "real_size_list: " << ToString(real_size_list_) << " ";
  ss << "ref_count: " << ref_count_ << " ";
  ss << "members: ";
  for (auto x : NodeTypeIndexList()) {
    ss << "__node: " << ToString(x) << " ";
  }
  for (const auto &symbol : SymbolList()) {
    ss << "__symbol: " << symbol << " ";
  }
  return ss.str();
}

BlockMemAssigner::BlockMemAssigner(ComputeGraphPtr compute_graph, const map<string, string> &anchor_to_symbol,
                                   const map<string, list<NodeIndexIO>> &symbol_to_anchors)
    : mem_offset_(0),
      compute_graph_(std::move(compute_graph)),
      symbol_to_anchors_(symbol_to_anchors),
      anchor_to_symbol_(anchor_to_symbol),
      life_time_(0) {}

BlockMemAssigner::~BlockMemAssigner() {
  GELOGD("blocks_store_ size : %lu", blocks_store_.size());
  for (MemoryBlock *memory_block : blocks_store_) {
    GE_DELETE_NEW_SINGLE(memory_block);
  }
}
void BlockMemAssigner::GetOutAndWorkSpaceMem(vector<int64_t> &all_memory_size) {
  vector<int64_t> temp;
  for (const NodePtr &n : compute_graph_->GetAllNodes()) {
    auto node_op_desc = n->GetOpDesc();
    GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue);
    if (node_op_desc->GetType() == ATOMICADDRCLEAN) {
      atomic_addr_clean_id_ = node_op_desc->GetId();
    }
    for (auto &out_anchor : n->GetAllOutDataAnchors()) {
      GeTensorDesc output_desc = node_op_desc->GetOutputDesc(out_anchor->GetIdx());
      bool reuse_input = false;
      GE_IF_BOOL_EXEC(ge::TensorUtils::GetReuseInput(output_desc, reuse_input) != SUCCESS,
                      GELOGI("Get reuse_input failed"));
      if (!reuse_input) {
        int64_t size = 0;
        GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(output_desc, size) != SUCCESS, GELOGI("Get size failed"));
        if (anchor_to_symbol_.empty()) {
          all_memory_size.emplace_back(size);
        } else {
          auto iter1 = anchor_to_symbol_.find(NodeIndexIO(n, out_anchor->GetIdx(), kOut).ToString());
          if (iter1 == anchor_to_symbol_.end()) {
            continue;
          }
          const std::string &symbol = iter1->second;
          auto iter2 = symbol_size_.find(symbol);
          if (iter2 == symbol_size_.end()) {
            symbol_size_[symbol] = size;
          } else if (size > static_cast<int64_t>(iter2->second)) {
            iter2->second = size;
          }
        }
      }
    }
    temp.clear();
    GetNodeWorkSpaceSize(n, temp);
    all_memory_size.insert(all_memory_size.end(), temp.begin(), temp.end());
  }
  GELOGI("The last atomic_addr_clean node id: %ld", atomic_addr_clean_id_);
  for (const auto &pair : symbol_size_) {
    all_memory_size.emplace_back(pair.second);
  }
  sort(all_memory_size.begin(), all_memory_size.end());
  GELOGI("All memory size: %s", ToString(all_memory_size).c_str());
  for (auto iter = all_memory_size.begin(); iter != all_memory_size.end();) {
    if (*iter == 0) {
      iter = all_memory_size.erase(iter);
    } else {
      ++iter;
    }
  }
  InitReuseFlag();
  PrintSymbolMap();
}
///
/// @ingroup domi
/// @brief decide memory size based on actual input memory size
/// @param [in] size actual memory size in need
/// @param [in] ranges memory size provided
/// @return size_t memory size to apply
///
size_t GetBlockSize(size_t size, const vector<int64_t> &ranges) {
  for (int64_t x : ranges) {
    auto x_temp = static_cast<size_t>(x);
    if (size <= x_temp) {
      return x_temp;
    }
  }
  GELOGW("Memory needed size:%zu is beyond the biggest block in memory ranges.", size);
  return size;
}
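// Binning example (illustrative numbers): with ranges {512, 4096, 65536}, a request of 1000
// bytes returns 4096 (the first range that fits), and a request of 100000 bytes falls through
// the loop and is returned unchanged with a warning. Note the loop relies on ranges being
// sorted ascending, which the callers are expected to provide.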
bool IsDirectOutputNode(const NodePtr &node, int idx) {
  if ((node != nullptr) && (node->GetOpDesc() != nullptr) && (node->GetOpDesc()->GetType() == NETOUTPUT)) {
    GELOGI("This is a netoutput node, so the input node's memory can not be reused.");
    return true;
  }
  return false;
}

void AddReusableBlockCount(const MemoryBlock &mem_block, map<string, uint64_t> &reusable_block_counts) {
  string key = std::to_string(mem_block.Size());
  key += "_" + std::to_string(mem_block.stream_id_);
  auto it = reusable_block_counts.find(key);
  if (it != reusable_block_counts.end()) {
    it->second++;
  } else {
    reusable_block_counts[key] = 1;
  }
}
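// The counting key is "<size>_<stream id>", e.g. a free 4096-byte block on stream 2 is
// tracked under "4096_2"; CanReuseBySize below consults these per-(size, stream) counts.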
void ReduceReusableBlockCount(const MemoryBlock &mem_block, map<string, uint64_t> &reusable_block_counts) {
  string key = std::to_string(mem_block.Size());
  key += "_" + std::to_string(mem_block.stream_id_);
  auto it = reusable_block_counts.find(key);
  if (it != reusable_block_counts.end()) {
    if (it->second > 0) {
      it->second--;
    }
  }
}

bool CanReuseBySize(const map<string, uint64_t> &reusable_block_counts, const MemoryBlock &reusable_block,
                    size_t block_size, size_t real_size, bool continuous) {
  bool can_reuse = false;
  if (reusable_block.Size() == block_size) {
    can_reuse = true;
  } else {
    string key = std::to_string(reusable_block.Size());
    key += "_" + std::to_string(reusable_block.stream_id_);
    auto it = reusable_block_counts.find(key);
    GE_IF_BOOL_EXEC(
        (it != reusable_block_counts.end() && (it->second > kReuseMaxCount)) && (reusable_block.Size() > block_size),
        can_reuse = true;
        GELOGD("Less size mem reuse, reuse block size:%zu, current block size:%zu", reusable_block.Size(),
               block_size););
  }
  return can_reuse;
}
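// Reuse heuristic in words: an exact size match always qualifies; otherwise a larger free
// block is handed out only when more than kReuseMaxCount (10) blocks of that (size, stream)
// pair are already idle, trading a little internal fragmentation for fewer distinct blocks.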
bool BlockMemAssigner::IsOutNodeSetContinuousInput(const NodePtr &n, uint32_t out_index, std::string &peer_name,
                                                   uint32_t &peer_input_index, bool &no_need_assign_memory) {
  if (n == nullptr || n->GetAllOutDataAnchors().size() == 0) {
    return false;
  }
  if (static_cast<size_t>(out_index) < n->GetAllOutDataAnchors().size()) {
    auto out_anchor = n->GetOutDataAnchor(out_index);
    GE_IF_BOOL_EXEC(out_anchor == nullptr,
                    GELOGE(FAILED, "Node[%s] output[%u] anchor is null.", n->GetName().c_str(), out_index);
                    return false;);
    for (auto const &peer_in_anchor : out_anchor->GetPeerInDataAnchors()) {
      GE_IF_BOOL_EXEC(peer_in_anchor == nullptr,
                      GELOGE(FAILED, "Node[%s] output[%u] peer_in_anchor 0 is null.", n->GetName().c_str(), out_index);
                      return false;);
      auto peer_node = peer_in_anchor->GetOwnerNode();
      GE_IF_BOOL_EXEC(peer_node == nullptr,
                      GELOGE(FAILED, "Node[%s] output[%u] node is null.", n->GetName().c_str(), out_index);
                      return false;);
      // Get the continuous input type of the node; the default is false.
      bool is_input_continuous = false;
      auto peer_in_node_desc = peer_node->GetOpDesc();
      GE_IF_BOOL_EXEC(peer_in_node_desc == nullptr,
                      GELOGE(FAILED, "Node[%s] output[%u] nodedesc is null.", n->GetName().c_str(), out_index);
                      return false;);
      // If GetBool fails, is_input_continuous stays false.
      (void)ge::AttrUtils::GetBool(peer_in_node_desc, ATTR_NAME_CONTINUOUS_INPUT, is_input_continuous);
      GE_IF_BOOL_EXEC(is_input_continuous && CheckIsZeroMemNodeType(peer_node->GetType()),
                      GELOGI("Node[%s] output[%u] no_need_assign_memory.", n->GetName().c_str(), out_index);
                      no_need_assign_memory = true;
                      return false;);
      if (is_input_continuous) {
        if (n->GetOwnerComputeGraph() != nullptr) {
          string graph_name = n->GetOwnerComputeGraph()->GetName();
          GELOGI("%s name[%s] output[%u] node[%s] set input[%d] continuous, input size[%u].", graph_name.c_str(),
                 n->GetName().c_str(), out_index, peer_in_node_desc->GetName().c_str(), peer_in_anchor->GetIdx(),
                 peer_node->GetAllInDataAnchorsSize());
          // Only set the attr once.
          if (node_continuous_input_blocks_[peer_in_node_desc->GetName()].size() == 0) {
            (void)ge::AttrUtils::SetBool(peer_in_node_desc, ATTR_NAME_CONTINUOUS_INPUT_ALLOC, true);
            node_continuous_input_counts_[peer_in_node_desc->GetName()] = peer_node->GetAllInDataAnchorsSize();
          }
          peer_input_index = peer_in_anchor->GetIdx();
          peer_name = peer_in_node_desc->GetName();
          return true;
        }
      }
    }
  }
  return false;
}
///
/// @ingroup GE
/// @brief Check pre_reuse flag & post_reuse flag for each symbol
/// @return void
///
void BlockMemAssigner::InitReuseFlag() {
  static const std::set<std::string> kPreReuseTypes = {ge::DATA_TYPE, ge::AIPP_DATA_TYPE, ge::ANN_DATA_TYPE,
                                                       ge::NETOUTPUT, ge::PROPOSAL, ge::ZEROSLIKE,
                                                       ge::CONSTANT, ge::CONSTANTOP};
  static const std::set<std::string> kPostReuseTypes = {ge::DATA_TYPE, ge::AIPP_DATA_TYPE, ge::ENTER,
                                                        ge::REFENTER, ge::NEXTITERATION, ge::REFNEXTITERATION};
  for (const auto &pair : symbol_to_anchors_) {
    std::string symbol = pair.first;
    bool pre_reuse_flag = true;
    bool post_reuse_flag = true;
    for (const auto &node_index_io : pair.second) {
      if (node_index_io.io_type_ == kIn) {
        continue;
      }
      OutDataAnchorPtr out_anchor = node_index_io.node_->GetOutDataAnchor(node_index_io.index_);
      if (out_anchor == nullptr) {
        continue;
      }
      bool out_flg = false;
      if (node_index_io.node_->GetOutDataNodes().empty()) {
        out_flg = true;
      }
      for (const auto &in_anchor : out_anchor->GetPeerInDataAnchors()) {
        if (IsDirectOutputNode(in_anchor->GetOwnerNode(), in_anchor->GetIdx())) {
          out_flg = true;
          break;
        }
      }
      const std::string &type = out_anchor->GetOwnerNode()->GetType();
      pre_reuse_flag = pre_reuse_flag && !out_flg && (kPreReuseTypes.count(type) == 0);
      post_reuse_flag = post_reuse_flag && (kPostReuseTypes.count(type) == 0);
      if (!pre_reuse_flag && !post_reuse_flag) {
        break;
      }
    }
    pre_reuse_flag_[symbol] = pre_reuse_flag;
    post_reuse_flag_[symbol] = post_reuse_flag;
  }
}
///
/// @ingroup GE
/// @brief get pre_reuse flag
/// @param [in] node
/// @param [in] out_index
/// @return bool
///
bool BlockMemAssigner::IsPreReuse(const NodePtr &node, uint32_t out_index) const {
  OutDataAnchorPtr out_data_anchor = nullptr;
  if (static_cast<size_t>(out_index) < node->GetAllOutDataAnchors().size()) {
    out_data_anchor = node->GetOutDataAnchor(out_index);
  }
  if (out_data_anchor == nullptr) {
    return false;
  }
  NodeIndexIO cur_node_index_io(out_data_anchor->GetOwnerNode(), out_data_anchor->GetIdx(), kOut);
  auto iter1 = anchor_to_symbol_.find(cur_node_index_io.ToString());
  if (iter1 == anchor_to_symbol_.end()) {
    return false;
  }
  const std::string &symbol = iter1->second;
  auto iter2 = pre_reuse_flag_.find(symbol);
  if (iter2 == pre_reuse_flag_.end()) {
    return false;
  }
  return iter2->second;
}

///
/// @ingroup GE
/// @brief get post_reuse flag
/// @param [in] mem_block
/// @return bool
///
bool BlockMemAssigner::IsPostReuse(const MemoryBlock *mem_block) const {
  if (mem_block == nullptr) {
    return false;
  }
  for (const auto &symbol : mem_block->SymbolList()) {
    auto iter = post_reuse_flag_.find(symbol);
    if (iter == post_reuse_flag_.end()) {
      continue;
    }
    if (!iter->second) {
      return false;
    }
  }
  return true;
}
///
/// @ingroup GE
/// @brief check if symbol of cur node_index_io has block
/// @param [in] node_index_io
/// @param [out] symbol
/// @return bool
///
bool BlockMemAssigner::IsSymbolExist(const NodeIndexIO &node_index_io, string &symbol) {
  auto iter = anchor_to_symbol_.find(node_index_io.ToString());
  if (iter == anchor_to_symbol_.end()) {
    return false;
  }
  symbol = iter->second;
  return symbol_blocks_.find(iter->second) != symbol_blocks_.end();
}

///
/// @ingroup GE
/// @brief Print symbol
/// @return void
///
void BlockMemAssigner::PrintSymbolMap() {
  for (const auto &pair : symbol_to_anchors_) {
    GELOGD("symbol=%s, max_size=%zu, pre_reuse=%s, post_reuse=%s", pair.first.c_str(), symbol_size_[pair.first],
           pre_reuse_flag_[pair.first] ? "true" : "false", post_reuse_flag_[pair.first] ? "true" : "false");
    for (const auto &node_index_io : pair.second) {
      GELOGD("anchor:%s", node_index_io.ToString().c_str());
    }
  }
}
bool BlockMemAssigner::IsContinuousOutput(const NodePtr &n) {
  if (n == nullptr) {
    GELOGE(FAILED, "Node is null.");
    return false;
  }
  // Get the continuous output type of the node; the default is false.
  bool is_output_continuous = false;
  auto node_desc = n->GetOpDesc();
  if (node_desc == nullptr) {
    GELOGE(FAILED, "Node[%s] nodedesc is null.", n->GetName().c_str());
    return false;
  }
  // If GetBool fails, is_output_continuous stays false.
  (void)ge::AttrUtils::GetBool(node_desc, ATTR_NAME_CONTINUOUS_OUTPUT, is_output_continuous);
  if (is_output_continuous) {
    if (n->GetOwnerComputeGraph() != nullptr) {
      string graph_name = n->GetOwnerComputeGraph()->GetName();
      GELOGI("%s name[%s] set continuous, output size[%u].", graph_name.c_str(), n->GetName().c_str(),
             n->GetAllOutDataAnchorsSize());
      return true;
    }
  }
  return false;
}

bool BlockMemAssigner::IsZeroCopyBlock(const NodePtr &node, bool continuous) {
  if (NodeUtils::IsDynamicShape(node)) {
    return ((node->GetType() == DATA_TYPE) && !continuous) || (node->GetType() == NETOUTPUT);
  }
  if ((node->GetType() == DATA_TYPE) && !continuous) {
    return !node->GetOpDesc()->HasAttr(ATTR_NAME_PARENT_NODE_INDEX);
  }
  if (node->GetType() == NETOUTPUT) {
    const auto &owner = node->GetOwnerComputeGraph();
    return owner->GetParentGraph() == nullptr;
  }
  return false;
}
MemoryBlock *BlockMemAssigner::ApplyMemory(size_t block_size, size_t real_size, size_t no_align_size,
                                           MemoryType mem_type, const NodePtr &n, uint32_t out_index,
                                           const vector<bool> &workspace_reuse_flag, const bool is_op_reuse_mem,
                                           const bool continuous) {
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(n == nullptr, return nullptr, "Input parameter n is null.");
  auto node_op_desc = n->GetOpDesc();
  GE_IF_BOOL_EXEC(node_op_desc == nullptr, return nullptr);
  bool is_reuse_memory = false;
  string ge_disable_reuse_mem_env = "0";
  (void)ge::GetContext().GetOption(OPTION_EXEC_DISABLE_REUSED_MEMORY, ge_disable_reuse_mem_env);
  if (ge_disable_reuse_mem_env != "1") {
    bool reuse_mem_flag = !((workspace_reuse_flag.size() > out_index) && !workspace_reuse_flag[out_index]);
    is_reuse_memory = !node_op_desc->HasAttr(kL2FusionDynamicConvergeOp) && !node_op_desc->HasAttr(kOpNoReuseMem) &&
                      reuse_mem_flag && is_op_reuse_mem && (IsPreReuse(n, out_index));
    auto stream_id = node_op_desc->GetStreamId();
    if (is_reuse_memory && !continuous) {
      for (auto it = reusable_blocks_[stream_id].begin(); it != reusable_blocks_[stream_id].end(); ++it) {
        MemoryBlock *reusable_block = *it;
        if (!IsPostReuse(reusable_block)) {
          reusable_block->reuse_mem_ = false;
          GELOGI("Unreusable block.");
          continue;
        }
        // A node can reuse blocks of the same stream and of preorder streams.
        if (CanReuseBySize(reusable_block_counts_, *reusable_block, block_size, real_size, continuous)) {
          reusable_block->AddNodeTypeIndex({n, mem_type, out_index, false}, real_size, no_align_size);
          if (mem_type == kOutput) {
            auto iter = anchor_to_symbol_.find(NodeIndexIO(n, out_index, kOut).ToString());
            if (iter != anchor_to_symbol_.end()) {
              reusable_block->AddSymbol(iter->second);
            }
          }
          reusable_block->continuous_block_ = continuous;
          reusable_block->ref_count_++;
          ReduceReusableBlockCount(*reusable_block, reusable_block_counts_);
          reusable_blocks_[stream_id].erase(it);
          return reusable_block;
        }
      }
    }
  }
  auto block = new (std::nothrow) MemoryBlock(block_size, node_op_desc->GetStreamId(), is_reuse_memory);
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(block == nullptr, return nullptr, "new MemoryBlock failed.");
  // Data and netoutput nodes need a zero-copy block.
  block->is_zero_copy_ = IsZeroCopyBlock(n, continuous);
  block->Init(real_size, mem_type, n, out_index, no_align_size);
  block->stream_id_ = node_op_desc->GetStreamId();
  block->ref_count_++;
  block->continuous_block_ = continuous;
  if (mem_type == kOutput) {
    auto iter = anchor_to_symbol_.find(NodeIndexIO(n, out_index, kOut).ToString());
    if (iter != anchor_to_symbol_.end()) {
      block->AddSymbol(iter->second);
    }
  }
  memory_blocks_.emplace_back(block);
  blocks_store_.emplace_back(block);
  return block;
}
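// ApplyMemory in two paths: when reuse is enabled and the allocation is not continuous, scan
// the free list of the node's stream for a post-reusable block that CanReuseBySize accepts;
// otherwise fall back to a fresh MemoryBlock, which is also recorded in blocks_store_ so the
// destructor can delete it.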
MemoryBlock *BlockMemAssigner::ApplyContinuousMemory(const NodePtr &n, const vector<int64_t> &ranges,
                                                     const bool is_op_reuse_mem) {
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(n == nullptr, return nullptr, "input node is null.");
  auto node_op_desc = n->GetOpDesc();
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(node_op_desc == nullptr, return nullptr, "node_op_desc is null.");
  MemoryBlock *block = nullptr;
  int64_t total_size = 0;
  for (uint32_t index = 0; index < static_cast<uint32_t>(node_op_desc->GetOutputsSize()); index++) {
    auto output_op_desc = node_op_desc->GetOutputDescPtr(index);
    if (output_op_desc == nullptr) {
      return nullptr;
    }
    int64_t size = 0;
    if (ge::TensorUtils::GetSize(*output_op_desc, size) != SUCCESS) {
      GELOGI("Get size failed");
      return nullptr;
    }
    size_t align_size = static_cast<size_t>(size);
    AlignMemOffset(align_size);
    total_size += align_size;
    // only the first output applies for the total size; the rest get zero-size entries
    if (index != 0) {
      zero_memory_list_.emplace_back(n, kOutput, index);
    }
  }
  auto block_size = GetBlockSize(total_size, ranges);
  GELOGI("Node[%s] continuous out memory size[%ld] block size[%zu]", node_op_desc->GetName().c_str(), total_size,
         block_size);
  vector<bool> workspace_reuse_flag;
  block = ApplyMemory(block_size, total_size, total_size, kOutput, n, 0, workspace_reuse_flag, is_op_reuse_mem, true);
  if (block != nullptr) {
    // hccl tasks need an aligned header and tail
    block->first_continuous_block_ = true;
    block->last_continuous_block_ = true;
  }
  return block;
}
MemoryBlock *BlockMemAssigner::ApplyOutMemory(const NodePtr &n, uint32_t index, const vector<int64_t> &ranges,
                                              const bool is_op_reuse_mem, const bool continuous) {
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(n == nullptr, return nullptr, "input node is null.");
  auto node_op_desc = n->GetOpDesc();
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(node_op_desc == nullptr, return nullptr, "node_op_desc is null.");
  MemoryBlock *block = nullptr;
  NodeIndexIO node_index_io(n, index, kOut);
  int64_t size = 0;
  auto output_op_desc = node_op_desc->GetOutputDescPtr(index);
  if (output_op_desc != nullptr) {
    GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(*output_op_desc, size) != SUCCESS, GELOGI("Get size failed"));
  }
  size_t no_align_size = 0;
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(GetNoAlignSize(*node_op_desc, index, no_align_size) != SUCCESS, return nullptr,
                                 "Get no align size failed");
  std::string symbol;
  if (IsSymbolExist(node_index_io, symbol)) {
    block = symbol_blocks_[symbol];
    block->AddNodeTypeIndex({n, kOutput, index, true}, size, no_align_size);
    block->ref_count_++;
  } else {
    int64_t max_size = size;
    auto iter1 = anchor_to_symbol_.find(node_index_io.ToString());
    if (iter1 != anchor_to_symbol_.end()) {
      auto iter2 = symbol_size_.find(iter1->second);
      if (iter2 != symbol_size_.end()) {
        max_size = iter2->second;
      }
    }
    auto block_size = GetBlockSize(max_size, ranges);
    vector<bool> workspace_reuse_flag;
    block = ApplyMemory(block_size, size, no_align_size, kOutput, n, index, workspace_reuse_flag, is_op_reuse_mem,
                        continuous);
  }
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(block == nullptr, return nullptr, "Block is nullptr.");
  int out_count_reuse_input = block->ref_count_;
  int out_count = 0;
  GE_IF_BOOL_EXEC(index >= n->GetAllOutDataAnchors().size(), GELOGE(FAILED, "index is out of range."); return nullptr);
  auto out_data_anchor = n->GetOutDataAnchor(index);
  GE_IF_BOOL_EXEC(out_data_anchor == nullptr, GELOGE(FAILED, "Out data anchor is nullptr."); return nullptr);
  for (const auto &in_anchor : out_data_anchor->GetPeerInDataAnchors()) {
    auto owner_node = in_anchor->GetOwnerNode();
    auto op_desc = owner_node->GetOpDesc();
    GE_IF_BOOL_EXEC(op_desc == nullptr, continue);
    Params *instance = Params::Instance();
    GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(instance == nullptr, return nullptr, "Params instance is nullptr.");
    if (!((instance->GetTarget() == TARGET_TYPE_TINY) && (op_desc->GetType() == NETOUTPUT))) {
      out_count++;
    }
  }
  bool reuse_input = false;
  for (const auto &in_anchor : out_data_anchor->GetPeerInDataAnchors()) {
    auto owner_node = in_anchor->GetOwnerNode();
    GE_IF_BOOL_EXEC(owner_node == nullptr, continue);
    auto op_desc = owner_node->GetOpDesc();
    GE_IF_BOOL_EXEC(op_desc == nullptr, continue);
    for (uint32_t i = 0; i < static_cast<uint32_t>(op_desc->GetOutputsSize()); i++) {
      bool dst_reuse_input = false;
      uint32_t dst_reuse_input_index = 0;
      auto owner_node_op_desc = op_desc->GetOutputDescPtr(i);
      GE_IF_BOOL_EXEC(owner_node_op_desc == nullptr, continue);
      GE_IF_BOOL_EXEC(ge::TensorUtils::GetReuseInput(*owner_node_op_desc, dst_reuse_input) != SUCCESS,
                      GELOGI("Get dst_reuse_input failed"));
      GE_IF_BOOL_EXEC(ge::TensorUtils::GetReuseInputIndex(*owner_node_op_desc, dst_reuse_input_index) != SUCCESS,
                      GELOGI("Get dst_reuse_input_index failed"));
      if (dst_reuse_input && (dst_reuse_input_index == static_cast<uint32_t>(in_anchor->GetIdx()))) {
        block->AddNodeTypeIndex({owner_node, kOutput, i, true}, block->Size(), block->Size());
        out_count_reuse_input += 1;
        reuse_input = true;
      }
    }
  }
  block->ref_count_ = reuse_input ? out_count_reuse_input + out_count - 1 : out_count;
  return block;
}
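// ref_count_ bookkeeping: out_count counts the consumers that actually read this output
// (NETOUTPUT consumers on the TINY target are excluded), while out_count_reuse_input grows by
// one for each downstream output that declares reuse_input on the matching input index, so
// the block appears to be held until those reusing outputs are released as well.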
bool IsOutputBlock(const ge::InDataAnchorPtr &in_data_anchor) {
  auto peer_out_anchor = in_data_anchor->GetPeerOutAnchor();
  GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, GELOGE(FAILED, "Peer out anchor is nullptr."); return false);
  auto src = peer_out_anchor->GetOwnerNode();
  int32_t index = peer_out_anchor->GetIdx();
  auto iter = GetLocalOmgContext().out_nodes_map.find(src->GetName());
  if (iter != GetLocalOmgContext().out_nodes_map.end()) {
    for (auto id : iter->second) {
      if (index == id) {
        return true;
      }
    }
  }
  return false;
}

// atomic output memory will be reassigned
bool IsAtomicOutputMemory(const ge::NodePtr &node, uint32_t output_index, bool is_atomic,
                          bool out_node_set_continuous_input) {
  auto op_desc = node->GetOpDesc();
  if (op_desc == nullptr) {
    return false;
  }
  vector<int64_t> atomic_output_index;
  // If GetListInt fails, atomic_output_index stays empty.
  (void)ge::AttrUtils::GetListInt(op_desc, ATOMIC_ATTR_OUTPUT_INDEX, atomic_output_index);
  if (!out_node_set_continuous_input && is_atomic) {
    for (auto &index : atomic_output_index) {
      if (static_cast<uint32_t>(index) == output_index) {
        if (node->GetOwnerComputeGraph() != nullptr) {
          string graph_name = node->GetOwnerComputeGraph()->GetName();
          GELOGD("[IMAS]Atomic no assign %s name[%s] output[%ld] streamid[%ld].", graph_name.c_str(),
                 op_desc->GetName().c_str(), index, op_desc->GetStreamId());
        }
        return true;
      }
    }
  }
  return false;
}
bool IsKnownSubgraphData(const NodePtr &node) {
  if (NodeUtils::IsDynamicShape(node)) {
    return false;
  }
  return node->GetOpDesc()->HasAttr(ATTR_NAME_PARENT_NODE_INDEX);
}

void BlockMemAssigner::ReleaseMemory(MemoryBlock *to_release, vector<MemoryBlock *> &reusable_memory) {
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(to_release == nullptr, return, "Input parameter to_release is null.");
  GE_CHK_TRUE_EXEC_INFO(to_release->ref_count_ <= 0, return, "Release memory");
  GE_CHK_TRUE_EXEC_INFO(!to_release->reuse_mem_, return, "doesn't reuse memory");
  --to_release->ref_count_;
  if (to_release->ref_count_ == 0) {
    to_release->SetLifeTimeEnd(life_time_);
    reusable_memory.emplace_back(to_release);
    AddReusableBlockCount(*to_release, reusable_block_counts_);
  }
}
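// ReleaseMemory decrements ref_count_ and, on reaching zero, stamps the block's life end with
// the current life_time_ (the id of the node being processed) and returns it to the stream's
// reusable pool, updating the (size, stream) counters used by CanReuseBySize.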
void BlockMemAssigner::ReleaseMemorys(const vector<MemoryBlock *> &to_releases,
                                      vector<MemoryBlock *> &reusable_memory) {
  for (auto mem_block : to_releases) {
    ReleaseMemory(mem_block, reusable_memory);
  }
}

void BlockMemAssigner::ReleaseInputNodeOutMemory(const unordered_map<string, vector<MemoryBlock *>> &node_out_blocks,
                                                 vector<MemoryBlock *> &reusable_memory, NodePtr &node) {
  for (const auto &in_anchor : node->GetAllInDataAnchors()) {
    if ((in_anchor->GetPeerOutAnchor() == nullptr) ||
        (in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc() == nullptr) || (node->GetOpDesc() == nullptr)) {
      return;
    }
    GE_IF_BOOL_EXEC(IsOutputBlock(in_anchor), continue);
    auto node_name = in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetName();
    GE_IF_BOOL_EXEC((in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetType() == CONSTANT) ||
                        (in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetType() == FASTRCNNPREDICTIONS) ||
                        (in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetType() == CONSTANTOP),
                    continue);
    auto it = node_out_blocks.find(node_name);
    if (it == node_out_blocks.end()) {
      continue;
    }
    for (auto block : it->second) {
      const vector<NodeTypeIndex> &node_type_indexs = block->NodeTypeIndexList();
      if (node_type_indexs.empty()) {
        continue;
      }
      GELOGD("node_type_indexs: %u, %s", node_type_indexs.back().index,
             node_type_indexs.back().node->GetName().c_str());
      if ((node_type_indexs.back().node == in_anchor->GetPeerOutAnchor()->GetOwnerNode()) &&
          (node_type_indexs.back().index == static_cast<uint32_t>(in_anchor->GetPeerOutAnchor()->GetIdx())) &&
          (node->GetOpDesc()->GetStreamId() == block->stream_id_)) {
        ReleaseMemory(block, reusable_memory);
        if (block->ref_count_ == 0) {
          SetLastUsedInputMemAttr(node, in_anchor->GetIdx());
        }
      }
    }
  }
}

void SplitStringByComma(const string &str, vector<string> &sub_str_vec) {
  std::string tmp_string = str + ",";
  std::string::size_type start_pos = 0;
  std::string::size_type cur_pos = tmp_string.find(',', 0);
  while (cur_pos != std::string::npos) {
    std::string sub_str = tmp_string.substr(start_pos, cur_pos - start_pos);
    if (!sub_str.empty()) {
      vector<string>::iterator ret = std::find(sub_str_vec.begin(), sub_str_vec.end(), sub_str);
      if (ret == sub_str_vec.end()) {
        sub_str_vec.push_back(sub_str);
      }
    }
    start_pos = cur_pos + 1;
    cur_pos = tmp_string.find(',', start_pos);
  }
}
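// SplitStringByComma also deduplicates: an input like "conv1,relu,,conv1" (names illustrative)
// yields {"conv1", "relu"}; empty segments are skipped and repeated names are inserted once.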
void CheckAndGetOpReuseEnv(const string &env, vector<string> &env_vec, bool &op_reuse_env_valid) {
  string env_str = env;
  if (env_str.size() > kReuseMaxCharNum) {
    GELOGE(FAILED, "The OP_NO_REUSE_MEM has more than %d characters.", kReuseMaxCharNum);
    return;
  }
  SplitStringByComma(env_str, env_vec);
  if (env_vec.size() > kReuseMaxOpNum) {
    GELOGE(FAILED, "The OP_NO_REUSE_MEM has more than %d nodes.", kReuseMaxOpNum);
    return;
  }
  op_reuse_env_valid = true;
}
Status BlockMemAssigner::AssignOutputMemoryWithReuse(const NodePtr &node, vector<int64_t> &ranges) {
  auto op_desc = node->GetOpDesc();
  int64_t stream_id = op_desc->GetStreamId();
  vector<int64_t> memorys_type;
  bool has_mem_type_attr = ge::AttrUtils::GetListInt(op_desc, ATTR_NAME_OUTPUT_MEM_TYPE_LIST, memorys_type);
  GELOGI("Assign memory node[%s], output size[%zu], output memory type size[%zu]", op_desc->GetName().c_str(),
         op_desc->GetOutputsSize(), memorys_type.size());
  if (has_mem_type_attr && (memorys_type.size() != op_desc->GetOutputsSize())) {
    GELOGE(INTERNAL_ERROR, "fusion: node[%s], output memory size err[outputsize:%zu, memorysize:%zu]",
           op_desc->GetName().c_str(), op_desc->GetOutputsSize(), memorys_type.size());
    return INTERNAL_ERROR;
  }
  is_op_reuse_mem_ = true;
  if (op_reuse_env_valid_) {
    vector<string>::iterator it_name =
        std::find(op_no_reuse_mem_vec_.begin(), op_no_reuse_mem_vec_.end(), op_desc->GetName());
    vector<string>::iterator it_type =
        std::find(op_no_reuse_mem_vec_.begin(), op_no_reuse_mem_vec_.end(), op_desc->GetType());
    GE_IF_BOOL_EXEC(it_name != op_no_reuse_mem_vec_.end() || it_type != op_no_reuse_mem_vec_.end(),
                    is_op_reuse_mem_ = false;);
  }
  bool is_atomic = false;
  // If GetBool fails, is_atomic stays false.
  (void)ge::AttrUtils::GetBool(op_desc, ATOMIC_ATTR_IS_ATOMIC_NODE, is_atomic);
  // Allocate memory for the current node and release same-size node memory in the workspace.
  GE_IF_BOOL_EXEC(ge_disable_reuse_mem_env_ != "1",
                  ReleaseMemorys(stream_workspace_blocks_[stream_id], reusable_blocks_[stream_id]));
  if (IsContinuousOutput(node)) {
    (void)ApplyContinuousMemory(node, ranges, is_op_reuse_mem_);
    return SUCCESS;
  }
  for (uint32_t i = 0; i < static_cast<uint32_t>(op_desc->GetOutputsSize()); i++) {
    int64_t size = 0;
    auto output_op_desc = op_desc->GetOutputDescPtr(i);
    if (output_op_desc != nullptr) {
      GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(*output_op_desc, size) != SUCCESS, GELOGI("Get size failed"));
    }
    // fusion: a size of another memory type does not mean HBM memory is allocated
    bool l1_flag = has_mem_type_attr && memorys_type[i] == RT_MEMORY_L1;
    if (l1_flag) {
      GELOGI("fusion: node[%s], output[%s], output memory type [%ld]", op_desc->GetName().c_str(),
             op_desc->GetOutputNameByIndex(i).c_str(), memorys_type[i]);
      size = 0;
    }
    std::string peer_name;
    uint32_t peer_input_index = 0;
    bool out_node_set_continuous_input = false;
    bool no_need_assign_memory = ((size == 0) || CheckIsZeroMemNodeType(node->GetType()));
    if (!no_need_assign_memory) {
      out_node_set_continuous_input =
          IsOutNodeSetContinuousInput(node, i, peer_name, peer_input_index, no_need_assign_memory);
      GE_IF_BOOL_EXEC(!no_need_assign_memory,
                      no_need_assign_memory = IsAtomicOutputMemory(node, i, is_atomic, out_node_set_continuous_input););
    }
    no_need_assign_memory = (no_need_assign_memory || IsKnownSubgraphData(node));
    if (no_need_assign_memory) {
      zero_memory_list_.emplace_back(node, kOutput, i, false);
      continue;
    }
    // atomic memory can't be reused
    bool need_change = is_op_reuse_mem_ && out_node_set_continuous_input && is_atomic;
    if (need_change) {
      is_op_reuse_mem_ = false;
    }
    MemoryBlock *mem_block = ApplyOutMemory(node, i, ranges, is_op_reuse_mem_, out_node_set_continuous_input);
    if (mem_block != nullptr) {
      node_out_blocks_[node->GetName()].emplace_back(mem_block);
      if (out_node_set_continuous_input) {
        node_continuous_input_blocks_[peer_name][peer_input_index] = mem_block;
      }
      NodeIndexIO node_index_io(node, i, kOut);
      auto iter = anchor_to_symbol_.find(node_index_io.ToString());
      if (iter == anchor_to_symbol_.end()) {
        continue;
      }
      symbol_blocks_[iter->second] = mem_block;
    }
  }
  return SUCCESS;
}
  1089. ///
  1090. /// @ingroup domi
  1091. /// @brief traverse all nodes outputs and workspace in need, apply memory block considering memory reuse
  1092. /// @param [in/out] ranges memory size provided
  1093. /// @return Status result
  1094. ///
void BlockMemAssigner::AssignMemoryWithReuse(vector<int64_t> &ranges) {
  (void)ge::GetContext().GetOption(OPTION_EXEC_DISABLE_REUSED_MEMORY, ge_disable_reuse_mem_env_);
  GEEVENT("Reuse memory %s", ge_disable_reuse_mem_env_ == "1" ? "disabled" : "enabled");
  string op_no_reuse_mem_str;
  const char *op_no_reuse_mem = std::getenv(OP_NO_REUSE_MEM);
  GE_IF_BOOL_EXEC(op_no_reuse_mem != nullptr, op_no_reuse_mem_str = string(op_no_reuse_mem);
                  CheckAndGetOpReuseEnv(op_no_reuse_mem_str, op_no_reuse_mem_vec_, op_reuse_env_valid_););
  for (NodePtr &n : compute_graph_->GetAllNodes()) {
    auto node_op_desc = n->GetOpDesc();
    GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue);
    life_time_ = node_op_desc->GetId();
    int64_t stream_id = node_op_desc->GetStreamId();
    if (AssignOutputMemoryWithReuse(n, ranges) != SUCCESS) {
      return;
    }
    stream_workspace_blocks_[stream_id].clear();
    vector<int64_t> workspace_bytes;
    GetNodeWorkSpaceSize(n, workspace_bytes);
    vector<int64_t> workspace_memory_type;
    bool has_workspace_mem_type_attr =
        ge::AttrUtils::GetListInt(node_op_desc, TVM_ATTR_NAME_WORKSPACE_TYPE, workspace_memory_type);
    vector<bool> workspace_reuse_flag;
    GE_IF_BOOL_EXEC(!ge::AttrUtils::GetListBool(node_op_desc, kAttrNameWorkspaceReuseFlag, workspace_reuse_flag),
                    GELOGD("OP %s get workspace_reuse_flag attr failed", node_op_desc->GetName().c_str()));
    GELOGI("Assign memory node[%s], size [workspace:%zu, memory type size:%zu]", node_op_desc->GetName().c_str(),
           workspace_bytes.size(), workspace_memory_type.size());
    if (has_workspace_mem_type_attr && (workspace_bytes.size() != workspace_memory_type.size())) {
      GELOGE(INTERNAL_ERROR, "fusion: node[%s], workspace size mismatch! [workspace:%zu, memory type:%zu]",
             n->GetName().c_str(), workspace_bytes.size(), workspace_memory_type.size());
      return;
    }
    for (size_t i = 0; i < workspace_bytes.size(); i++) {
      // fusion: a non-HBM workspace memory type does not mean HBM memory is allocated
      bool workspace_skip_flag = false;
      if (has_workspace_mem_type_attr && workspace_memory_type[i] == RT_MEMORY_L1) {
        GELOGI(
            "fusion: node[%s] workspace index[%zu] is not hbm type, add to zero_memory_list, workspace memory type [%ld]",
            node_op_desc->GetName().c_str(), i, workspace_memory_type[i]);
        workspace_skip_flag = true;
      }
      if (workspace_bytes[i] == 0 || workspace_skip_flag) {
        zero_memory_list_.emplace_back(n, kWorkspace, static_cast<uint32_t>(i), false);
        continue;
      }
      MemoryBlock *mem_block = ApplyMemory(GetBlockSize(static_cast<size_t>(workspace_bytes[i]), ranges),
                                           static_cast<size_t>(workspace_bytes[i]),
                                           static_cast<size_t>(workspace_bytes[i]), kWorkspace, n,
                                           static_cast<uint32_t>(i), workspace_reuse_flag, is_op_reuse_mem_, false);
      GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(mem_block == nullptr, continue, "failed to apply memory block.");
      CheckWorkspaceReuse(workspace_reuse_flag, static_cast<uint32_t>(i), stream_id, mem_block);
    }
    ReleaseInputNodeOutMemory(node_out_blocks_, reusable_blocks_[stream_id], n);
  }
  GELOGD("Assigned memory blocks:");
  for (auto mem_block : memory_blocks_) {
    GELOGD("%s", mem_block->String().c_str());
    (void)mem_block;  // avoid an unused-variable warning when debug logging is compiled out
  }
  bool merge_dynamic_batch = false;
  GE_IF_BOOL_EXEC(!(ge_disable_reuse_mem_env_ == "1"), merge_dynamic_batch = MergeDynamicBatchBlocks());
  GE_IF_BOOL_EXEC((!(ge_disable_reuse_mem_env_ == "1") && !merge_dynamic_batch), ReuseBlocksByLifeTime(ranges.size()));
  AssignContinuousBlocks();
  ResizeMemoryBlocks();
  GELOGD("Memory blocks after resize:");
  for (auto mem_block : memory_blocks_) {
    GELOGD("%s", mem_block->String().c_str());
    (void)mem_block;  // avoid an unused-variable warning when debug logging is compiled out
  }
}
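
/// Worked example (values assumed for illustration): for a node with workspace sizes {0, 2048} and
/// TVM_ATTR_NAME_WORKSPACE_TYPE = {RT_MEMORY_HBM, RT_MEMORY_L1}, workspace 0 is skipped for its
/// zero size and workspace 1 for its L1 type, so both go to zero_memory_list_; only a workspace
/// with a non-zero size and HBM type reaches ApplyMemory and CheckWorkspaceReuse.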

void BlockMemAssigner::CheckWorkspaceReuse(const vector<bool> &workspace_reuse_flag, uint32_t index, int64_t stream_id,
                                           MemoryBlock *mem_block) {
  // reuse is the default; it is only disabled when the flag vector explicitly says false for this index
  bool reuse_mem_flag = (index >= workspace_reuse_flag.size()) || workspace_reuse_flag[index];
  if (reuse_mem_flag) {
    stream_workspace_blocks_[stream_id].emplace_back(mem_block);
  }
}
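
/// Example (values assumed): with workspace_reuse_flag = {true, false}, the block for index 0 is
/// pushed to stream_workspace_blocks_ and the one for index 1 is not; an index with no flag
/// recorded defaults to reusable.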

void BlockMemAssigner::GetNodeWorkSpaceSize(const NodePtr &node, vector<int64_t> &workspace_memory) {
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(node->GetOpDesc() == nullptr, return, "Op desc is null.");
  vector<int64_t> workspace_byte_nums = node->GetOpDesc()->GetWorkspaceBytes();
  GELOGD("GetNodeWorkSpaceSize: node[%s] size:%zu", node->GetOpDesc()->GetName().c_str(), workspace_byte_nums.size());
  for (int64_t byte_size : workspace_byte_nums) {
    workspace_memory.emplace_back(byte_size);
    GELOGD("GetNodeWorkSpaceSize: push back size:%ld", byte_size);
  }
}

// comparator: order blocks by their largest real size, descending
static bool CompareBlockMaxSize(MemoryBlock *left, MemoryBlock *right) {
  if (left == nullptr || right == nullptr) {
    return false;
  }
  auto left_max_size = std::max_element(left->RealSizeList().begin(), left->RealSizeList().end());
  if (left_max_size != left->RealSizeList().end()) {
    auto right_max_size = std::max_element(right->RealSizeList().begin(), right->RealSizeList().end());
    if (right_max_size == right->RealSizeList().end() || (*left_max_size > *right_max_size)) {
      return true;
    }
  }
  return false;
}
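
/// Typical use (illustrative sketch; `blocks` is a hypothetical std::vector<MemoryBlock *>):
/// @code
///   // largest single real size first, matching how MergeDynamicBatchBlocks sorts each batch
///   std::sort(blocks.begin(), blocks.end(), CompareBlockMaxSize);
/// @endcode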

void MergeBlocks(std::vector<MemoryBlock *> &dest, std::vector<MemoryBlock *> &src) {
  for (size_t i = 0; i < dest.size(); ++i) {
    if (i >= src.size()) {
      return;
    }
    if (dest[i] != nullptr && src[i] != nullptr) {
      if (!dest[i]->reuse_mem_ || !src[i]->reuse_mem_) {
        GELOGD("Diff batch's workspace can't be reused, i: %zu, dest[i]: %s, stream: %ld, src[i]: %s, stream: %ld.", i,
               dest[i]->String().c_str(), dest[i]->stream_id_, src[i]->String().c_str(), src[i]->stream_id_);
        continue;
      }
      for (auto &symbol : src[i]->SymbolList()) {
        dest[i]->AddSymbol(symbol);
      }
      for (size_t j = 0; j < src[i]->NodeTypeIndexList().size(); ++j) {
        dest[i]->AddNodeTypeIndex(src[i]->NodeTypeIndexList()[j], src[i]->RealSizeList()[j],
                                  src[i]->NoAlignSizeList()[j]);
        src[i]->deleted_block_ = true;
      }
    }
  }
}
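
/// Worked example (sizes assumed): with dest holding blocks of max real size {4096, 2048} and src
/// holding {3072, 1024}, src[0] is folded into dest[0] and src[1] into dest[1]; each merged src
/// block is marked deleted_block_, so only the dest blocks survive into ResizeMemoryBlocks.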

bool BlockMemAssigner::MergeDynamicBatchBlocks() {
  bool merged = false;
  std::map<std::string, std::vector<MemoryBlock *>> dynamic_batch_blocks;
  for (auto block : memory_blocks_) {
    if (block == nullptr) {
      continue;
    }
    std::string batch_label;
    if (block->IsSameLabel(batch_label)) {
      dynamic_batch_blocks[batch_label].emplace_back(block);
    }
  }
  auto it = dynamic_batch_blocks.begin();
  auto it_max = it;
  // find the batch label with the most blocks; all other batches are merged into it
  for (; it != dynamic_batch_blocks.end(); ++it) {
    if (it->second.size() > it_max->second.size()) {
      it_max = it;
    }
    std::sort(it->second.begin(), it->second.end(), CompareBlockMaxSize);
  }
  if (it_max != dynamic_batch_blocks.end()) {
    GELOGD("MergeDynamicBatch %s block counts %zu", it_max->first.c_str(), it_max->second.size());
  }
  for (it = dynamic_batch_blocks.begin(); it != dynamic_batch_blocks.end(); ++it) {
    if (it != it_max) {
      GELOGD("MergeDynamicBatch from %s to %s", it->first.c_str(), it_max->first.c_str());
      MergeBlocks(it_max->second, it->second);
      merged = true;
    }
  }
  return merged;
}

// comparator: order blocks by input index, ascending
static bool CompareBlockIndex(MemoryBlock *left, MemoryBlock *right) {
  if (left == nullptr || right == nullptr) {
    return false;
  }
  return left->input_index_ < right->input_index_;
}

///
/// @ingroup domi
/// @brief order blocks so that blocks feeding one node's continuous input sit together, sorted by input index
/// @param [in] org_blocks all blocks to be processed
/// @param [in] block_map blocks belonging to the continuous input
/// @param [out] dest_blocks blocks after reordering
/// @param [in/out] continuous_blocks blocks that must be continuous, ordered in place
/// @param [in] type "input" or "output"
///
void ReAssignContinuousBlocks(const std::vector<MemoryBlock *> &org_blocks,
                              const std::map<MemoryBlock *, uint32_t> &block_map,
                              std::vector<MemoryBlock *> &dest_blocks, std::vector<MemoryBlock *> &continuous_blocks,
                              const std::string &type) {
  // keep every block that is not part of the continuous input, in its original order
  for (auto &memory_block : org_blocks) {
    if (memory_block == nullptr || memory_block->deleted_block_) {
      continue;
    }
    if (block_map.find(memory_block) != block_map.end()) {
      continue;
    }
    dest_blocks.emplace_back(memory_block);
  }
  // append the continuous blocks, sorted by input index
  std::sort(continuous_blocks.begin(), continuous_blocks.end(), CompareBlockIndex);
  size_t count = 0;
  for (auto &memory_block : continuous_blocks) {
    GE_IF_BOOL_EXEC(memory_block == nullptr, continue);
    GELOGI("Block continuous %s index:%u", type.c_str(), memory_block->input_index_);
    count++;
    if (count == 1) {
      memory_block->first_continuous_block_ = true;
    }
    if (count == continuous_blocks.size()) {
      memory_block->last_continuous_block_ = true;
    }
    dest_blocks.emplace_back(memory_block);
  }
}
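
/// Worked example (indices assumed): continuous blocks with input_index_ {2, 0, 1} are sorted to
/// {0, 1, 2} and appended after all unrelated blocks; the block with index 0 gets
/// first_continuous_block_ and the block with index 2 gets last_continuous_block_, which later
/// drives the MEM_ALIGN_SIZE padding in ResizeMemoryBlocks.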

void BlockMemAssigner::AssignContinuousBlocks() {
  for (auto &block_map : node_continuous_input_blocks_) {
    std::vector<MemoryBlock *> dest_memory_blocks;
    std::map<MemoryBlock *, uint32_t> continuous_block_map;
    std::vector<MemoryBlock *> continuous_blocks;
    auto it = node_continuous_input_counts_.find(block_map.first);
    GE_IF_BOOL_EXEC(it == node_continuous_input_counts_.end(), continue);
    GELOGI("Node:%s continuous input block count:%zu input count:%u", block_map.first.c_str(), block_map.second.size(),
           it->second);
    GE_IF_BOOL_EXEC(it->second != block_map.second.size(), continue);
    for (auto &input_block : block_map.second) {
      if (input_block.second != nullptr) {
        continuous_block_map[input_block.second] = input_block.first;
        input_block.second->input_index_ = input_block.first;
        continuous_blocks.emplace_back(input_block.second);
      }
    }
    if (continuous_block_map.size() != continuous_blocks.size()) {
      GELOGW("Node:%s continuous input map size:%zu vector size:%zu", block_map.first.c_str(),
             continuous_block_map.size(), continuous_blocks.size());
      continue;
    }
    ReAssignContinuousBlocks(memory_blocks_, continuous_block_map, dest_memory_blocks, continuous_blocks, "input");
    memory_blocks_.swap(dest_memory_blocks);
  }
}

void BlockMemAssigner::ReuseBlocksByLifeTime(size_t range_size) {
  // a range size of 1 means all blocks share one size, so there is nothing to gain from life-time reuse
  if (range_size <= 1) {
    return;
  }
  for (size_t i = 0; i < memory_blocks_.size(); ++i) {
    auto parent = memory_blocks_[i];
    if (parent == nullptr || parent->deleted_block_ || parent->continuous_block_) {
      continue;
    }
    if (parent->reuse_mem_ && !IsPostReuse(parent)) {
      parent->reuse_mem_ = false;
    }
    for (size_t j = i + 1; j < memory_blocks_.size(); ++j) {
      auto child = memory_blocks_[j];
      if (child == nullptr) {
        continue;
      }
      // if the node runs before the atomic_addr_clean node, the continuous memory can't be reused
      if (!parent->NodeTypeIndexList().empty() && child->continuous_block_) {
        auto node = parent->NodeTypeIndexList()[0].node;
        if (node == nullptr || node->GetOpDesc() == nullptr || (node->GetOpDesc()->GetId() < GetAtomicAddrCleanId())) {
          continue;
        }
      }
      parent->AddLifeReuseBlock(child, total_node_depend_stream_life_);
    }
  }
}

///
/// @ingroup domi_omg
/// @brief traverse the memory blocks, resize each one and calculate its offset
/// @param [in&out] memory_blocks_ memory blocks, with head/tail offsets filled in
///
void BlockMemAssigner::ResizeMemoryBlocks() {
  for (auto &memory_block : memory_blocks_) {
    if (memory_block == nullptr || memory_block->deleted_block_ || memory_block->is_zero_copy_) {
      continue;
    }
    if (memory_block->first_continuous_block_) {
      mem_offset_ += MEM_ALIGN_SIZE;
    }
    memory_block->Resize();
    memory_block->SetHeadOffset(mem_offset_);
    mem_offset_ += memory_block->Size();
    memory_block->SetTailOffset(mem_offset_ - 1);
  }
  GELOGI("mem_offset_ excluding zero-copy memory is %zu.", mem_offset_);
}
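
/// Worked example (sizes assumed, no first_continuous_block_ involved): two live blocks that
/// resize to 1024 and 1536 bytes are laid out back to back:
///   block0: head_offset = 0,    tail_offset = 1023
///   block1: head_offset = 1024, tail_offset = 2559
/// leaving mem_offset_ = 2560; a first_continuous_block_ would first advance mem_offset_ by
/// MEM_ALIGN_SIZE before being placed.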

///
/// @ingroup domi
/// @brief given a NodeTypeIndex, set the assigned offset in the op's OpDef
/// @param [in] node_type <node, memory type, id>
/// @param [in] block memory block that carries the offset
/// @param [in] real_size memory size actually occupied
/// @param [in] no_align_size memory size needed before alignment
/// @param [in] child_block whether this block reuses its parent's memory by life time
///
void SetOffsetSize(const NodeTypeIndex &node_type, const MemoryBlock *block, size_t real_size, size_t no_align_size,
                   bool child_block) {
  ge::OpDescPtr op_desc = node_type.node->GetOpDesc();
  GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(op_desc == nullptr, return, "op_desc is null.");
  string graph_name = node_type.node->GetOwnerComputeGraph()->GetName();
  vector<int64_t> memorys_type;
  int64_t offset = block->HeadOffset();
  size_t end = node_type.life_time_end;
  bool has_mem_type_attr = ge::AttrUtils::GetListInt(op_desc, ATTR_NAME_OUTPUT_MEM_TYPE_LIST, memorys_type);
  if (node_type.mem_type == kOutput) {
    vector<int64_t> output_list = op_desc->GetOutputOffset();
    for (auto i = static_cast<uint32_t>(output_list.size()); i < node_type.index + 1; i++) {
      output_list.emplace_back(kInvalidOffset);
    }
    if (output_list.empty()) {
      GELOGW("Empty output");
      return;
    }
    static const set<string> kSetOffsetTypes = {DATA_TYPE, AIPP_DATA_TYPE, MULTISHAPE, NETOUTPUT};
    if ((kSetOffsetTypes.count(op_desc->GetType()) > 0) && !IsKnownSubgraphData(node_type.node)) {
      if ((output_list[node_type.index] == kInvalidOffset) || (output_list[node_type.index] < offset)) {
        output_list.at(node_type.index) = offset;
      }
    } else {
      // fusion: for non-HBM memory types, keep the original offset value from op_desc
      bool set_out_offset = (!has_mem_type_attr) ||
                            (memorys_type.size() > node_type.index && memorys_type[node_type.index] != RT_MEMORY_L1);
      if (set_out_offset) {
        output_list.at(node_type.index) = offset;
      }
    }
    op_desc->SetOutputOffset(output_list);
  } else if (node_type.mem_type == kWorkspace) {
    vector<int64_t> workspace_list;
    workspace_list = op_desc->GetWorkspace();
    for (auto i = static_cast<uint32_t>(workspace_list.size()); i < node_type.index + 1; i++) {
      workspace_list.emplace_back(kInvalidOffset);
    }
    vector<int64_t> workspace_mem_type;
    bool has_workspace_mem_type = ge::AttrUtils::GetListInt(op_desc, TVM_ATTR_NAME_WORKSPACE_TYPE, workspace_mem_type);
    // fusion: for non-HBM memory types, keep the original offset value from op_desc
    bool set_workspace_offset = (!has_workspace_mem_type) || (workspace_mem_type.size() > node_type.index &&
                                                              workspace_mem_type[node_type.index] != RT_MEMORY_L1);
    if (set_workspace_offset) {
      workspace_list.at(node_type.index) = offset;
    }
    op_desc->SetWorkspace(workspace_list);
  }
  GELOGI(
      "[IMAS]Set %s name[%s] %s[%u] offset to [%ld] streamid[%ld] size[%zu] realsize[%zu]"
      " noalignsize[%zu] life time begin[%ld] life time end[%zu] child[%d:%d:%d:%d] isref[%d].",
      graph_name.c_str(), op_desc->GetName().c_str(), node_type.GetMemType().c_str(), node_type.index, offset,
      op_desc->GetStreamId(), block->Size(), real_size, no_align_size, op_desc->GetId(), end, child_block,
      block->reuse_mem_, block->continuous_block_, block->deleted_block_, node_type.ref_input);
}

void SetBlockOpMemOffset(MemoryBlock *block, bool child_block) {
  if (block == nullptr) {
    return;
  }
  size_t index = 0;
  size_t real_size = 0;
  size_t no_align_size = 0;
  auto real_size_list_size = block->RealSizeList().size();
  for (const NodeTypeIndex &node_type_index : block->NodeTypeIndexList()) {
    if (index < real_size_list_size) {
      real_size = block->RealSizeList()[index];
      no_align_size = block->NoAlignSizeList()[index];
    }
    SetOffsetSize(node_type_index, block, real_size, no_align_size, child_block);
    index++;
  }
}

void BlockMemAssigner::SetOpMemOffset(bool is_zero_copy) {
  for (MemoryBlock *memory_block : memory_blocks_) {
    if (memory_block == nullptr || memory_block->deleted_block_) {
      continue;
    }
    // only handle the blocks that match the requested pass: zero-copy or normal
    if (memory_block->is_zero_copy_ != is_zero_copy) {
      continue;
    }
    SetBlockOpMemOffset(memory_block, false);
    for (MemoryBlock *child_block : memory_block->ChildBlockList()) {
      SetBlockOpMemOffset(child_block, true);
    }
  }
  if (!is_zero_copy) {
    for (const NodeTypeIndex &node_type_index : zero_memory_list_) {
      MemoryBlock block(0, 0);
      SetOffsetSize(node_type_index, &block, 0, 0, false);
    }
  }
}

Status BlockMemAssigner::Assign() {
  vector<int64_t> ranges;
  if (GetMemoryRanges(ranges) != SUCCESS) {
    GELOGE(FAILED, "GetMemoryRanges failed!");
    return FAILED;
  }
  GE_IF_BOOL_EXEC(ranges.empty(), return SUCCESS);
  AssignMemoryWithReuse(ranges);
  SetOpMemOffset(false);
  return SUCCESS;
}
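
/// End-to-end sketch (illustrative; `assigner` is a hypothetical, already constructed
/// BlockMemAssigner, and the zero-copy pass is assumed to be driven by the caller as elsewhere
/// in GE):
/// @code
///   if (assigner.Assign() == SUCCESS) {  // ranges -> reuse assignment -> normal offsets
///     assigner.SetOpMemOffset(true);     // separately place the zero-copy blocks
///   }
/// @endcode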

bool BlockMemAssigner::CheckIsZeroMemNodeType(const string &node_type) const {
  return (node_type == VARIABLE) || (node_type == CONSTANT) || (node_type == MULTISHAPE) ||
         (node_type == HCOMBROADCAST) || (node_type == CONSTANTOP) || (node_type == ASSIGNADD) ||
         (node_type == ASSIGNSUB) || (node_type == ASSIGN) || (node_type == HVDWAIT) ||
         (node_type == HVDCALLBACKBROADCAST);
}
}  // namespace ge

The Graph Engine (GE) module is a submodule of MindSpore, implemented in C++. It sits between the front-end module (ME) and the underlying hardware, acting as the bridge between them. GE takes the graph delivered by ME as input, performs a series of deep graph optimizations, and finally outputs a graph that runs efficiently on the underlying hardware. GE applies optimizations tailored to the hardware architecture of the Ascend AI processor to fully exploit its compute power. During model training/inference, GE is invoked automatically and is transparent to the user. GE consists mainly of two parts, GE API and GE Core; the detailed architecture diagram is shown below.