
host_mem_manager.cc 5.6 kB

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/manager/host_mem_manager.h"

#include <sstream>

#include "graph/ge_context.h"
#include "graph/utils/tensor_utils.h"
#include "runtime/mem.h"

namespace {
const uint32_t kMallocHostMemFlag = 0;
}  // namespace

namespace ge {
Status SharedMemAllocator::Allocate(SharedMemInfo &mem_info) {
  auto device_id = GetContext().DeviceId();
  GELOGD("SharedMemAllocator::Malloc host mem size= %zu for devid:[%u].", mem_info.mem_size, device_id);
  auto dev_id = static_cast<int32_t>(device_id);
  GE_CHK_RT_RET(rtSetDevice(dev_id));
  // DeviceReset before memory finished!
  GE_MAKE_GUARD(not_used_var, [&] { GE_CHK_RT(rtDeviceReset(dev_id)); });
  rtMallocHostSharedMemoryIn input_para = {mem_info.shm_name.c_str(), mem_info.mem_size, kMallocHostMemFlag};
  rtMallocHostSharedMemoryOut output_para;
  rtError_t rt_ret = rtMallocHostSharedMemory(&input_para, &output_para);
  if (rt_ret != RT_ERROR_NONE) {
    GELOGE(RT_FAILED, "Call rt api(rtMallocHostSharedMemory) failed, devid:[%u].", device_id);
    return GE_GRAPH_MEMORY_ALLOC_FAILED;
  }
  mem_info.fd = output_para.fd;
#if (ENABLE_OPEN_SRC != True)
  mem_info.host_aligned_ptr = AlignedPtr::BuildAlignedPtr(mem_info.mem_size,
                                                          [&output_para](std::unique_ptr<uint8_t[], deleter> &ptr) {
                                                            GELOGD("set aligned_ptr, addr=%p", output_para.ptr);
                                                            ptr.reset(reinterpret_cast<uint8_t *>(output_para.ptr));
                                                          },
                                                          [](uint8_t *ptr) {
                                                            GELOGD("reset aligned_ptr in SharedMemAllocator, addr=%p", ptr);
                                                            ptr = nullptr;
                                                          }, 0);
#else
  mem_info.host_address = reinterpret_cast<uint8_t *>(output_para.ptr);
#endif
  mem_info.device_address = reinterpret_cast<uint8_t *>(output_para.devPtr);
  return SUCCESS;
}

Status SharedMemAllocator::DeAllocate(SharedMemInfo &mem_info) {
  GELOGD("SharedMemAllocator::DeAllocate");
#if (ENABLE_OPEN_SRC != True)
  rtFreeHostSharedMemoryIn free_para = {mem_info.shm_name.c_str(), mem_info.mem_size, mem_info.fd,
                                        mem_info.host_aligned_ptr->MutableGet(), mem_info.device_address};
#else
  rtFreeHostSharedMemoryIn free_para = {mem_info.shm_name.c_str(), mem_info.mem_size, mem_info.fd,
                                        mem_info.host_address, mem_info.device_address};
#endif
  rtError_t rt_ret = rtFreeHostSharedMemory(&free_para);
  if (rt_ret != RT_ERROR_NONE) {
    GELOGE(RT_FAILED, "Call rt api(rtFreeHostSharedMemory) failed, ret: 0x%X.", rt_ret);
    return RT_FAILED;
  }
  return ge::SUCCESS;
}

HostMemManager &HostMemManager::Instance() {
  static HostMemManager mem_manager;
  return mem_manager;
}

Status HostMemManager::Initialize() {
  std::lock_guard<std::recursive_mutex> lock(mutex_);
  allocator_ = std::unique_ptr<SharedMemAllocator>(new (std::nothrow) SharedMemAllocator());
  if (allocator_ == nullptr) {
    GELOGE(GE_GRAPH_MALLOC_FAILED, "Shared memory allocator init failed!");
    return GE_GRAPH_MALLOC_FAILED;
  }
  return SUCCESS;
}

void HostMemManager::Finalize() noexcept {
  std::lock_guard<std::recursive_mutex> lock(mutex_);
  for (auto &it : var_memory_base_map_) {
    if (allocator_->DeAllocate(it.second) != SUCCESS) {
      GELOGW("Host %s mem release failed!", it.first.c_str());
    }
  }
  var_memory_base_map_.clear();
}

Status HostMemManager::MallocSharedMemory(SharedMemInfo &mem_info) {
  std::lock_guard<std::recursive_mutex> lock(mutex_);
  auto iter = var_memory_base_map_.find(mem_info.op_name);
  if (iter != var_memory_base_map_.end()) {
    GELOGE(FAILED, "Host shared memory for op %s has been malloced", mem_info.op_name.c_str());
    return FAILED;
  }
  mem_info.shm_name = OpNameToShmName(mem_info.op_name);
  GE_CHECK_NOTNULL(allocator_);
  GE_CHK_STATUS_RET(allocator_->Allocate(mem_info));
  var_memory_base_map_[mem_info.op_name] = mem_info;
  return SUCCESS;
}

Status HostMemManager::QueryVarMemInfo(const string &op_name, uint64_t &base_addr, uint64_t &data_size) {
  std::lock_guard<std::recursive_mutex> lock(mutex_);
  if (var_memory_base_map_.find(op_name) == var_memory_base_map_.end()) {
    GELOGE(INTERNAL_ERROR, "Find host base base_addr failed, node name:%s!", op_name.c_str());
    return INTERNAL_ERROR;
  }
  base_addr = static_cast<uint64_t>(reinterpret_cast<uintptr_t>(var_memory_base_map_[op_name].device_address));
  data_size = var_memory_base_map_[op_name].mem_size;
  return SUCCESS;
}

string HostMemManager::OpNameToShmName(const string &op_name) {
  string sh_name("Ascend_");
  std::hash<std::string> hash_str;
  sh_name.append(std::to_string(hash_str(op_name)));
  return sh_name;
}
}  // namespace ge
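
For context, here is a minimal usage sketch of HostMemManager (not part of host_mem_manager.cc). It relies only on the interfaces visible above (Instance, Initialize, MallocSharedMemory, QueryVarMemInfo, Finalize) and assumes that SharedMemInfo exposes op_name and mem_size as assignable public fields, as its use in this file suggests, and that the GE runtime/device environment is already set up. The op name and buffer size are hypothetical.

#include "graph/manager/host_mem_manager.h"

ge::Status AllocateHostSharedVar() {
  ge::HostMemManager &manager = ge::HostMemManager::Instance();
  if (manager.Initialize() != ge::SUCCESS) {   // creates the SharedMemAllocator
    return ge::FAILED;
  }

  ge::SharedMemInfo mem_info;
  mem_info.op_name = "var_example";            // hypothetical variable op name
  mem_info.mem_size = 1024;                    // bytes of host shared memory to request
  if (manager.MallocSharedMemory(mem_info) != ge::SUCCESS) {
    return ge::FAILED;
  }

  // Query back the device-visible base address and size recorded for this op.
  uint64_t base_addr = 0U;
  uint64_t data_size = 0U;
  if (manager.QueryVarMemInfo("var_example", base_addr, data_size) != ge::SUCCESS) {
    return ge::FAILED;
  }

  manager.Finalize();                          // releases every allocated shared-memory block
  return ge::SUCCESS;
}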

The Graph Engine (GE) module is a submodule of MindSpore, implemented in C++. It sits between the front-end module ME and the underlying hardware and acts as the bridge between them. GE takes the graph delivered by ME as input, applies a series of deep graph optimizations, and finally outputs a graph that can run efficiently on the underlying hardware. GE performs optimizations tailored to the hardware architecture of the Ascend AI processor in order to fully exploit its compute power. During model training/inference, GE is invoked automatically and is transparent to the user. GE consists of two main parts, GE API and GE Core; the detailed architecture diagram is shown below.