
rt_model.h

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CCE_RUNTIME_MODEL_H__
#define __CCE_RUNTIME_MODEL_H__

#include "base.h"

#ifdef __cplusplus
extern "C" {
#endif

typedef enum tagModelTaskType {
  RT_MODEL_TASK_KERNEL = 0,
  RT_MODEL_TASK_EVENT_RECORD,
  RT_MODEL_TASK_EVENT_WAIT,
  RT_MODEL_TASK_FUSION_START,
  RT_MODEL_TASK_FUSION_END,
  RT_MODEL_TASK_KERNEL_EX,
  RT_MODEL_TASK_HCCL,
  RT_MODEL_TASK_STREAM_SWITCH,
  RT_MODEL_TASK_STREAM_ACTIVE,
  RT_MODEL_TASK_LABEL_SET,
  RT_MODEL_TASK_LABEL_SWITCH,
  RT_MODEL_TASK_LABEL_GOTO,
  RT_MODEL_TASK_PROFILER_TRACE,
  RT_MODEL_TASK_MEMCPY_ASYNC,
  RT_MODEL_TASK_NOTIFY_RECORD,
  RT_MODEL_TASK_NOTIFY_WAIT,
  RT_MODEL_TASK_REDUCE_ASYNC,
  RT_MODEL_TASK_RDMA_SEND,
  RT_MODEL_TASK_EVENT_RESET = 18,
  RT_MODEL_TASK_MODEL_END_GRAPH,
  RT_MODEL_TASK_STREAM_SWITCH_N,
  RT_MODEL_TASK_RDMA_DB_SEND,
  RT_MODEL_TASK_MEMCPY_ADDR_ASYNC
} rtModelTaskType_t;

typedef enum tagModelStreamType {
  RT_MODEL_HEAD_STREAM = 0,
  RT_MODEL_WAIT_ACTIVE_STREAM = 1
} rtModelStreamType_t;

typedef enum tagModelQueueFlag {
  RT_MODEL_INPUT_QUEUE = 0,
  RT_MODEL_OUTPUT_QUEUE = 1
} rtModelQueueFlag_t;

#define EXECUTOR_NONE ((uint32_t)0x0)
#define EXECUTOR_TS ((uint32_t)0x01)
#define EXECUTOR_AICPU ((uint32_t)0x02)
/**
 * @ingroup
 * @brief the type definition of the aicpu model task command
 */
typedef enum tagTsAicpuModelCmd {
  TS_AICPU_MODEL_LOAD = 1,
  TS_AICPU_MODEL_EXECUTE,
  TS_AICPU_MODEL_DESTROY,
  TS_AICPU_MODEL_ABORT,
  TS_AICPU_MODEL_RESERVED,
} tsAicpuModelCmd;

typedef struct tagAicpuTaskInfo {
  uint32_t taskID;
  uint32_t streamID;
  uint32_t kernelType;
  uint64_t kernelName;
  uint64_t kernelSo;
  uint64_t paraBase;
  uint32_t taskFlag;
} rtAicpuTaskInfo_t;

typedef struct tagModelStreamInfo {
  uint32_t streamID;
  uint32_t streamFlag;
} rtModelStreamInfo_t;

typedef struct tagModelQueueInfo {
  uint32_t queueID;
  uint32_t flag;
} rtModelQueueInfo_t;

typedef struct tagAicpuModelInfo {
  uint32_t moduleID;
  uint32_t tsId;
  uint16_t streamInfoNum;
  uint16_t aicpuTaskNum;
  uint64_t streamInfoPtr;
  uint64_t aicpuTaskPtr;
  uint16_t queueSize;
  uint64_t queueInfoPtr;
} rtAicpuModelInfo_t;

typedef struct tagKernelTaskInfo {
  uint16_t blockDim;
  uint16_t argsCount;
  uint16_t argsSize;
  uint16_t reserved;
  char *stubFunc;
  uint8_t *smDesc;
  uint8_t *args;
  uint16_t *argsOffset;
} rtKernelTaskInfo_t;

typedef struct tagKernelTaskInfoEx {
  uint32_t flags;
  uint32_t argsSize;
  void *args;
  uint32_t reserved[6];
} rtKernelTaskInfoEx_t;

typedef struct tagEventTaskInfo {
  uint32_t eventID;
  uint32_t reserved[9];
} rtEventTaskInfo_t;

typedef struct tagStreamSwitchTaskInfo {
  int64_t value;
  uint64_t pValuePtr;
  uint32_t trueStreamID;
  uint32_t dataType;
  uint32_t reserved[4];
} rtStreamSwitchTaskInfo_t;

typedef struct tagStreamSwitchNTaskInfo {
  uint64_t pValuePtr;
  uint64_t pTrueStreamPtr;
  uint32_t size;
  uint32_t elementSize;
  uint32_t dataType;
  uint32_t reserved[3];
} rtStreamSwitchNTaskInfo_t;

typedef struct tagStreamActiveTaskInfo {
  uint32_t activeStreamID;
  uint32_t reserved[9];
} rtStreamActiveTaskInfo_t;

typedef struct tagSetTaskInfo {
  uint16_t labelId;
  uint32_t reserved[9];
} rtLabelSetTaskInfo_t;

typedef struct tagSwitchTaskInfo {
  uint32_t value;
  uint32_t reserved[9];
} rtLabelSwitchTaskInfo_t;

typedef struct tagLabelGotoTaskInfo {
  uint16_t labelId;
  uint32_t reserved[9];
} rtLabelGotoTaskInfo_t;

typedef struct tagProfilerTraceTaskInfo {
  uint64_t profilerTraceId;
  uint32_t notify : 8;
  uint32_t reserved_ : 24;
  uint32_t flags;
  uint32_t reserved[6];
} rtProfilerTrace_t;

typedef struct tagrtMemcpyAsyncTaskInfo {
  void *dst;
  uint64_t destMax;
  void *src;
  uint64_t count;
  uint32_t kind;
  uint32_t reserved;
} rtMemcpyAsyncTaskInfo_t;

typedef struct tagrtNotifyTaskInfo {
  uint32_t notifyID;
  uint32_t reserved[9];
} rtNotifyTaskInfo_t;

typedef struct tagrtReduceAsyncTaskInfo {
  void *dst;
  uint64_t destMax;
  void *src;
  uint64_t count;
  uint32_t kind;
  uint32_t type;
} rtReduceAsyncTaskInfo_t;

typedef struct tagrtRdmaSendTaskInfo {
  uint32_t index;
  uint32_t wqe_index;
  uint32_t reserved[8];
} rtRdmaSendTaskInfo_t;

typedef struct tagrtRdmaDbSendTaskInfo {
  uint64_t dbInfo;
  uint32_t dbIndex;
  uint32_t reserved[7];  // offset 7
} rtRdmaDbSendTaskInfo_t;

typedef struct tagrtModelEndGraphTaskInfo {
  uint32_t modelId;
  uint32_t executorFlag;
  uint32_t reserved[8];
} rtModelEndGraphTaskInfo_t;

typedef struct tagTaskInfo {
  uint32_t type;
  uint32_t streamID;
  union {
    rtKernelTaskInfoEx_t kernelTaskEx;
    rtKernelTaskInfo_t kernelTask;
    rtEventTaskInfo_t eventTask;
    rtStreamSwitchTaskInfo_t streamSwitchTask;
    rtStreamActiveTaskInfo_t streamActiveTask;
    rtLabelSetTaskInfo_t labelSetTask;
    rtLabelSwitchTaskInfo_t labelSwitchTask;
    rtLabelGotoTaskInfo_t labelGotoTask;
    rtProfilerTrace_t profilertraceTask;
    rtMemcpyAsyncTaskInfo_t memcpyAsyncTask;
    rtNotifyTaskInfo_t notifyTask;
    rtReduceAsyncTaskInfo_t reduceAsyncTask;
    rtRdmaSendTaskInfo_t rdmaSendTask;
    rtRdmaDbSendTaskInfo_t rdmaDbSendTask;
    rtModelEndGraphTaskInfo_t modelEndGraphTask;
    rtStreamSwitchNTaskInfo_t streamSwitchNTask;
    uint32_t reserved[10];
  } u;
} rtTaskInfo_t;

typedef void *rtModel_t;

typedef rtError_t (*rtTaskGenCallback)(rtModel_t model, rtTaskInfo_t *taskInfo);
/**
 * @ingroup rt_model
 * @brief set the callback used when generating model tasks
 * @param [in] callback callback function
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid input handle
 */
RTS_API rtError_t rtSetTaskGenCallback(rtTaskGenCallback callback);
/**
 * @ingroup rt_model
 * @brief create model instance
 * @param [out] model created model
 * @param [in] flag reserved
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid input handle
 */
RTS_API rtError_t rtModelCreate(rtModel_t *model, uint32_t flag);

/**
 * @ingroup rt_model
 * @brief destroy model instance
 * @param [in] model model to destroy
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid input handle
 */
RTS_API rtError_t rtModelDestroy(rtModel_t model);

/**
 * @ingroup rt_model
 * @brief bind a stream to a model instance
 * @param [in] model model to bind
 * @param [in] stream stream to bind
 * @param [in] flag reserved
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid input handle
 */
RTS_API rtError_t rtModelBindStream(rtModel_t model, rtStream_t stream, uint32_t flag);
/**
 * @ingroup rt_model
 * @brief unbind a stream from a model instance
 * @param [in] model model to unbind from
 * @param [in] stream stream to unbind
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid input handle
 */
RTS_API rtError_t rtModelUnbindStream(rtModel_t model, rtStream_t stream);

/**
 * @ingroup rt_model
 * @brief notify the runtime that the model has been loaded
 * @param [in] model loaded model
 * @return RT_ERROR_NONE for ok
 */
RTS_API rtError_t rtModelLoadComplete(rtModel_t model);
/**
 * @ingroup rt_model
 * @brief execute model instance
 * @param [in] model model to execute
 * @param [in] stream stream to execute the model on
 * @param [in] flag execution flag
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid input handle
 */
RTS_API rtError_t rtModelExecute(rtModel_t model, rtStream_t stream, uint32_t flag);

/**
 * @ingroup rt_model
 * @brief get the last persistent task id of the model
 * @param [in] model model to query
 * @param [out] taskid task id of the model
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_VALUE for invalid input handle
 */
RTS_API rtError_t rtModelGetTaskId(rtModel_t model, uint32_t *taskid);
/**
 * @ingroup rt_model
 * @brief add an end-graph task to a stream
 * @param [in] model model to execute
 * @param [in] stream end graph stream
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_VALUE for invalid input handle
 */
RTS_API rtError_t rtEndGraph(rtModel_t model, rtStream_t stream);

/**
 * @ingroup rt_model
 * @brief add an end-graph task with a flag to a stream
 * @param [in] model model to execute
 * @param [in] stream end graph stream
 * @param [in] flags AICPU datadump
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_VALUE for invalid input handle
 */
RTS_API rtError_t rtEndGraphEx(rtModel_t model, rtStream_t stream, uint32_t flags);

/**
 * @ingroup rt_model
 * @brief set the executors used by the model
 * @param [in] model model to configure
 * @param [in] flags EXECUTOR_TS | EXECUTOR_AICPU
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_VALUE for invalid input handle
 */
RTS_API rtError_t rtModelExecutorSet(rtModel_t model, uint8_t flags);
/**
 * @ingroup rt_model
 * @brief abort model
 * @param [in] model model to abort
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_VALUE for invalid input handle
 */
RTS_API rtError_t rtModelAbort(rtModel_t model);

/**
 * @ingroup rt_model
 * @brief bind a queue to a model
 * @param [in] model model to bind
 * @param [in] queueId queueId to bind
 * @param [in] flag input or output queue flag (rtModelQueueFlag_t)
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_VALUE for invalid input handle
 */
RTS_API rtError_t rtModelBindQueue(rtModel_t model, uint32_t queueId, rtModelQueueFlag_t flag);

/**
 * @ingroup rt_model
 * @brief get model id
 * @param [in] model model to query
 * @param [out] modelId model id
 * @return RT_ERROR_NONE for ok
 * @return RT_ERROR_INVALID_VALUE for invalid input handle
 */
RTS_API rtError_t rtModelGetId(rtModel_t model, uint32_t *modelId);
#ifdef __cplusplus
}
#endif

#endif  // __CCE_RUNTIME_MODEL_H__
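rtTaskInfo_t above is a tagged union: the type field carries an rtModelTaskType_t value that selects which member of u is meaningful. A minimal sketch of a task-generation callback is shown below; the include path, the chosen fields, and the logging are illustrative assumptions rather than anything prescribed by the header.

#include <stdio.h>
#include "rt_model.h"  /* assumed include path for this header */

/* Illustrative rtTaskGenCallback: the runtime invokes it for each task record
 * produced while a model's tasks are generated. taskInfo->type selects the
 * valid member of taskInfo->u. */
static rtError_t LogTask(rtModel_t model, rtTaskInfo_t *taskInfo) {
  (void)model;
  switch (taskInfo->type) {
    case RT_MODEL_TASK_KERNEL:
      printf("kernel task on stream %u, blockDim=%u\n",
             (unsigned)taskInfo->streamID,
             (unsigned)taskInfo->u.kernelTask.blockDim);
      break;
    case RT_MODEL_TASK_MEMCPY_ASYNC:
      printf("async memcpy, %llu bytes, kind=%u\n",
             (unsigned long long)taskInfo->u.memcpyAsyncTask.count,
             (unsigned)taskInfo->u.memcpyAsyncTask.kind);
      break;
    default:
      printf("task type %u on stream %u\n",
             (unsigned)taskInfo->type, (unsigned)taskInfo->streamID);
      break;
  }
  return RT_ERROR_NONE;  /* RT_ERROR_NONE is assumed to come from base.h */
}

Such a callback is installed with rtSetTaskGenCallback, as in the lifecycle sketch that follows.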

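Taken together, the declarations suggest a create / bind / load / execute / tear-down lifecycle. The sketch below is an illustration under stated assumptions, not code from this repository: rtStreamCreate, rtStreamSynchronize and rtStreamDestroy are assumed to be declared in the companion stream header of the same runtime, and CHECK_RT is a purely illustrative error-checking macro.

#define CHECK_RT(call)             \
  do {                             \
    rtError_t err_ = (call);       \
    if (err_ != RT_ERROR_NONE) {   \
      return err_;                 \
    }                              \
  } while (0)

rtError_t RunModelOnce(void) {
  rtModel_t model = NULL;
  rtStream_t stream = NULL;

  CHECK_RT(rtSetTaskGenCallback(LogTask));        /* optional: LogTask from the sketch above */
  CHECK_RT(rtModelCreate(&model, 0));             /* flag is documented as reserved */
  CHECK_RT(rtStreamCreate(&stream, 0));           /* assumed stream API */
  CHECK_RT(rtModelBindStream(model, stream, 0));  /* flag is documented as reserved */

  /* ... the tasks that make up the model are issued on the bound stream here ... */

  CHECK_RT(rtModelLoadComplete(model));           /* tell the runtime loading is finished */
  CHECK_RT(rtModelExecute(model, stream, 0));
  CHECK_RT(rtStreamSynchronize(stream));          /* assumed: wait for execution to finish */

  CHECK_RT(rtModelUnbindStream(model, stream));
  CHECK_RT(rtStreamDestroy(stream));              /* assumed stream API */
  CHECK_RT(rtModelDestroy(model));
  return RT_ERROR_NONE;
}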
The Graph Engine (GE) is a submodule of MindSpore. It is implemented in C++ and sits between the front-end module ME and the underlying hardware, bridging the two. GE takes the graph delivered by ME as input, applies a series of deep graph optimizations, and outputs a graph that can run efficiently on the underlying hardware. GE performs optimizations tailored to the hardware architecture of the Ascend AI processor so as to fully exploit the processor's compute power. During model training and inference, GE is invoked automatically and is transparent to the user. GE consists mainly of two parts, GE API and GE Core; the detailed architecture diagram is shown below.