
ge_aipp_op.cc 41 kB

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/preprocess/insert_op/ge_aipp_op.h"

#include <memory>
#include <set>
#include <string>
#include <utility>
#include <vector>

#include "base_insert_op.h"
#include "common/dynamic_aipp.h"
#include "common/ge/ge_util.h"
#include "common/util.h"
#include "common/util/error_manager/error_manager.h"
#include "external/graph/operator_factory.h"
#include "framework/common/debug/ge_log.h"
#include "framework/common/ge_inner_error_codes.h"
#include "framework/common/op/ge_op_utils.h"
#include "framework/common/types.h"
#include "framework/omg/omg_inner_types.h"
#include "graph/debug/ge_attr_define.h"
#include "graph/optimize/common/params.h"
#include "graph/utils/graph_utils.h"
#include "graph/utils/node_utils.h"
#include "graph/utils/op_desc_utils.h"
#include "graph/utils/tensor_utils.h"
#include "graph/utils/type_utils.h"
#include "proto/insert_op.pb.h"
#include "graph/common/local_context.h"

#define SAVE_AIPP_ATTR(KEY, SAVE_TYPE) \
  do { \
    (void)aipp_attrs.SetAttr(#KEY, GeAttrValue::CreateFrom<SAVE_TYPE>(aipp_params_->KEY())); \
  } while (0)

#define SAVE_AIPP_ATTR_LIST(KEY, SAVE_TYPE) \
  do { \
    if (aipp_params_->KEY##_size() > 0) { \
      (void)aipp_attrs.SetAttr(#KEY, GeAttrValue::CreateFrom<SAVE_TYPE>(aipp_params_->KEY(0))); \
    } \
  } while (0)

#define AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(expr, _status, errormsg) \
  do { \
    bool b = (expr); \
    if (!b) { \
      GELOGE(_status, errormsg); \
      ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {errormsg}); \
      return _status; \
    } \
  } while (0)

namespace {
const int32_t DEFAULT_MATRIX_R0C0_YUV2RGB = 298;
const int32_t DEFAULT_MATRIX_R0C1_YUV2RGB = 0;
const int32_t DEFAULT_MATRIX_R0C2_YUV2RGB = 409;
const int32_t DEFAULT_MATRIX_R1C0_YUV2RGB = 298;
const int32_t DEFAULT_MATRIX_R1C1_YUV2RGB = -100;
const int32_t DEFAULT_MATRIX_R1C2_YUV2RGB = -208;
const int32_t DEFAULT_MATRIX_R2C0_YUV2RGB = 298;
const int32_t DEFAULT_MATRIX_R2C1_YUV2RGB = 516;
const int32_t DEFAULT_MATRIX_R2C2_YUV2RGB = 0;
const int32_t DEFAULT_MATRIX_R0C0_RGB2YUV = 66;
const int32_t DEFAULT_MATRIX_R0C1_RGB2YUV = 129;
const int32_t DEFAULT_MATRIX_R0C2_RGB2YUV = 25;
const int32_t DEFAULT_MATRIX_R1C0_RGB2YUV = -38;
const int32_t DEFAULT_MATRIX_R1C1_RGB2YUV = -74;
const int32_t DEFAULT_MATRIX_R1C2_RGB2YUV = 112;
const int32_t DEFAULT_MATRIX_R2C0_RGB2YUV = 112;
const int32_t DEFAULT_MATRIX_R2C1_RGB2YUV = -94;
const int32_t DEFAULT_MATRIX_R2C2_RGB2YUV = -18;
const int32_t DEFAULT_OUTPUT_BIAS_0 = 16;
const int32_t DEFAULT_OUTPUT_BIAS_1 = 128;
const int32_t DEFAULT_OUTPUT_BIAS_2 = 128;
const int32_t DEFAULT_INPUT_BIAS_0 = 16;
const int32_t DEFAULT_INPUT_BIAS_1 = 128;
const int32_t DEFAULT_INPUT_BIAS_2 = 128;
const float DEFAULT_VAR_RECI_CHN = 1.0;
}  // namespace

namespace ge {
namespace {
const char *const kMbatchSwitchnName = "mbatch-switch-name";
const char *const kAippConfigPath = "aipp_config_path";
const char *const kCurrentAippIndex = "current_aipp_index";
const char *const kDynamicAippData = "ascend_dynamic_aipp_data";
const uint64_t kMinTransferShape = 3;
const int kAippImageInputIndex = 0;
const int kAippParamsInputIndex = 1;
const int kAippDataOutputIndex = 0;
const int64_t kDynamicDim = -1;

// the `format` must be one of NCHW or NHWC
Status GetDataDimN(const ge::NodePtr &data_node, ge::Format format, int64_t &batch) {
  auto output_desc = NodeUtils::GetOutputDesc(*data_node, 0);
  auto shape = output_desc.GetShape().GetDims();
  if (shape.size() == kMinTransferShape) {
    batch = 1;
    return SUCCESS;
  }
  if (shape.size() == DIM_DEFAULT_SIZE) {
    switch (format) {
      case FORMAT_NCHW:
        batch = shape[NCHW_DIM_N];
        return SUCCESS;
      case FORMAT_NHWC:
        batch = shape[NHWC_DIM_N];
        return SUCCESS;
      default:
        GELOGE(PARAM_INVALID, "Not support data format: %s", TypeUtils::FormatToSerialString(format).c_str());
        return PARAM_INVALID;
    }
  }
  string errormsg =
      "its shape size must be in range[3,4] which dynamic aipp is linked, "
      "maybe this input is not suitable for dynamic aipp";
  ErrorManager::GetInstance().ATCReportErrMessage(
      "E10001", {"parameter", "value", "reason"},
      {data_node->GetName() + " shape size", to_string(shape.size()), errormsg});
  GELOGE(PARAM_INVALID, "The shape size of this node [%s] which linked dynamic aipp must be in range[3, 4], but is %zu",
         data_node->GetName().c_str(), shape.size());
  return PARAM_INVALID;
}

// the batch_count must be more than 0
int64_t CalcMaxSize(int64_t batch_count) {
  batch_count--;
  if (batch_count > 0) {
    if (INT64_MAX / batch_count < static_cast<int64_t>(sizeof(kAippDynamicBatchPara))) {
      return -1;
    }
  }
  int64_t size = batch_count * sizeof(kAippDynamicBatchPara);
  if (INT64_MAX - static_cast<int64_t>(sizeof(kAippDynamicPara)) < size) {
    return -1;
  }
  return size + sizeof(kAippDynamicPara);
}

Format GetAndCheckFormat() {
  switch (GetLocalOmgContext().format) {
    case domi::DOMI_TENSOR_NCHW:
      return FORMAT_NCHW;
    case domi::DOMI_TENSOR_NHWC:
      return FORMAT_NHWC;
    default:
      GELOGE(PARAM_INVALID, "Unexpected format found %d", static_cast<int>(GetLocalOmgContext().format));
      return FORMAT_ND;
  }
}
}  // namespace

Status AippOp::Init(domi::AippOpParams *aipp_params) {
  aipp_params_ = new (std::nothrow) domi::AippOpParams();
  if (aipp_params_ == nullptr) {
    return FAILED;
  }
  aipp_params_->CopyFrom(*aipp_params);
  return SUCCESS;
}

AippOp::~AippOp() {
  if (aipp_params_ != nullptr) {
    delete aipp_params_;
    aipp_params_ = nullptr;
  }
}

Status AippOp::InsertAippToGraph(ComputeGraphPtr &graph, std::string &aippConfigPath, const uint32_t index) {
  GE_CHECK_NOTNULL(graph);
  NodePtr target_input = nullptr;
  std::vector<std::pair<OutDataAnchorPtr, InDataAnchorPtr>> target_edges;
  GE_CHK_STATUS_RET(this->GetTargetPosition(graph, target_input, target_edges), "Get data nodes position failed");

  std::map<OutDataAnchorPtr, NodePtr> out_anchors_to_aipp;
  for (auto &out_in_anchors : target_edges) {
    auto iter = out_anchors_to_aipp.find(out_in_anchors.first);
    if (iter == out_anchors_to_aipp.end()) {
      auto aipp = CreateAipp(out_in_anchors.first, aippConfigPath, index);
      GE_CHECK_NOTNULL(aipp);
      out_anchors_to_aipp[out_in_anchors.first] = aipp;

      auto ret = GraphUtils::InsertNodeBetweenDataAnchors(out_in_anchors.first, out_in_anchors.second, aipp);
      if (ret != GRAPH_SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Failed to link edges for aipp node %s", aipp->GetName().c_str());
        return INTERNAL_ERROR;
      }

      // add aipp data if needed
      if (GetAippMode() == domi::AippOpParams::dynamic) {
        ret = CreateAippData(aipp);
        if (ret != SUCCESS) {
          GELOGE(INTERNAL_ERROR, "Failed to create aipp data for aipp %s data %s", aipp->GetName().c_str(),
                 out_in_anchors.first->GetOwnerNode()->GetName().c_str());
          return INTERNAL_ERROR;
        }
      }
      GELOGI("Create aipp %s and insert it to the graph", aipp->GetName().c_str());
    } else {
      out_in_anchors.second->UnlinkAll();
      auto &aipp = iter->second;
      auto ret = out_in_anchors.second->LinkFrom(aipp->GetOutDataAnchor(0));
      if (ret != GRAPH_SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Failed to link aipp %s to the peer node %s", aipp->GetName().c_str(),
               out_in_anchors.second->GetOwnerNode()->GetName().c_str());
        return INTERNAL_ERROR;
      }
    }
  }

  return SUCCESS;
}

NodePtr AippOp::CreateAipp(const OutDataAnchorPtr &out_anchor, const std::string &aippConfigPath,
                           const uint32_t &index) {
  const auto &node = out_anchor->GetOwnerNode();
  std::string current_name = node->GetName() + "_" + std::to_string(out_anchor->GetIdx()) + "_huawei_aipp";
  auto aipp_opdesc_ptr = MakeShared<OpDesc>(current_name, AIPP);
  if (aipp_opdesc_ptr == nullptr) {
    GELOGE(OUT_OF_MEMORY, "Failed to alloc aipp desc, name %s", current_name.c_str());
    return nullptr;
  }

  // Update attributes
  if (AddAippAttrbutes(aipp_opdesc_ptr, aippConfigPath, index) != SUCCESS) {
    return nullptr;
  }

  // Update the input desc; the output desc will be refreshed during InferShape
  auto node_desc = out_anchor->GetOwnerNode()->GetOpDesc();
  if (node_desc == nullptr) {
    return nullptr;
  }
  auto opdesc_src_data = node_desc->GetOutputDesc(out_anchor->GetIdx());
  if (opdesc_src_data.GetDataType() != DT_FLOAT) {
    GELOGW("The datatype of data node %s is not FP32", node_desc->GetName().c_str());
    opdesc_src_data.SetDataType(DT_FLOAT);
  }

  // We must get the TensorDesc from the output anchor on the Data node
  // and update it on the input anchor of the Aipp node,
  // because the InferShape function for the Aipp node needs the input tensor format,
  // but the InferFormat process before InferShape can not infer the format
  // if the tensor on the Aipp node has an unknown shape
  if (aipp_opdesc_ptr->UpdateInputDesc(kAippImageInputIndex, opdesc_src_data) != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to update the output desc from node %s to aipp %s", node_desc->GetName().c_str(),
           aipp_opdesc_ptr->GetName().c_str());
    return nullptr;
  }

  return node->GetOwnerComputeGraph()->AddNode(aipp_opdesc_ptr);
}

Status AippOp::AddAippAttrbutes(const OpDescPtr &op_desc, const std::string &aipp_cfg_path, const uint32_t &index) {
  GeAttrValue::NAMED_ATTRS aipp_attr;
  ConvertParamToAttr(aipp_attr);
  GE_CHK_BOOL_RET_STATUS(AttrUtils::SetNamedAttrs(op_desc, ATTR_NAME_AIPP, aipp_attr), INTERNAL_ERROR,
                         "Set name attrs for aipp node failed");

  GE_CHK_BOOL_RET_STATUS(AttrUtils::SetStr(op_desc, kAippConfigPath, aipp_cfg_path), INTERNAL_ERROR,
                         "Set config file path attr for aipp node failed");

  std::vector<std::string> empty_names;
  GE_CHK_BOOL_RET_STATUS(AttrUtils::SetListStr(op_desc, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, empty_names),
                         INTERNAL_ERROR, "Set ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES attr for aipp node failed");

  GE_CHK_BOOL_RET_STATUS(AttrUtils::SetInt(op_desc, kCurrentAippIndex, index), INTERNAL_ERROR,
                         "Set kCurrentAippIndex attr for aipp node failed");

  // add input/output desc
  GeTensorDesc tensor;
  GE_CHK_GRAPH_STATUS_RET(op_desc->AddInputDesc("images", tensor), "Failed to add input images for aipp node");
  if (GetAippMode() == domi::AippOpParams::dynamic) {
    GE_CHK_GRAPH_STATUS_RET(op_desc->AddOptionalInputDesc("params", tensor), "Failed to add params for aipp node");
  }
  GE_CHK_GRAPH_STATUS_RET(op_desc->AddOutputDesc("features", tensor), "Failed to add output features for aipp node");

  return SUCCESS;
}

domi::AippOpParams::AippMode AippOp::GetAippMode() { return aipp_params_->aipp_mode(); }

NodePtr AippOp::FindDataByIndex(const ComputeGraphPtr &graph, int rank) {
  int64_t data_index = 0;
  for (auto &node : graph->GetDirectNode()) {
    if (node->GetType() != DATA) {
      continue;
    }
    // For functional multi batch, skip the Data inserted for index.
    if (node->GetOpDesc()->HasAttr(ATTR_INSERT_BY_MBATCH)) {
      continue;
    }
    // There is no `index` attribute on the `Data` node when compiling in the inference scenario,
    // so we can only use the order of all `Data` nodes to infer the data index
    if (data_index++ != rank) {
      continue;
    }
    return node;
  }
  GELOGE(PARAM_INVALID, "Can not find the data node by index %d", rank);
  string errormsg = "Can not find the data node by aipp parameter related_input_rank " + to_string(rank);
  ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {errormsg});
  return nullptr;
}

Status AippOp::GetAndCheckTarget(const ComputeGraphPtr &graph, int rank, NodePtr &target,
                                 std::set<uint32_t> &edge_indexes) {
  auto data_node = FindDataByIndex(graph, rank);
  if (data_node == nullptr) {
    GELOGE(PARAM_INVALID, "Get target input node for rank %d failed", rank);
    return PARAM_INVALID;
  }
  data_node_linked_aipp = data_node;
  auto data_opdesc = data_node->GetOpDesc();
  GE_CHECK_NOTNULL(data_opdesc);
  string set_dt_str;
  if (ge::AttrUtils::GetStr(data_opdesc, ATTR_ATC_USER_DEFINE_DATATYPE, set_dt_str)) {
    ErrorManager::GetInstance().ATCReportErrMessage("E10034", {"opname"}, {data_opdesc->GetName()});
    GELOGE(INTERNAL_ERROR,
           "This input op [%s] is linked to aipp, can not be set to fp16, "
           "please check your atc parameter --insert_op_conf, --input_fp16_nodes.",
           data_opdesc->GetName().c_str());
    return PARAM_INVALID;
  }

  // add the dynamic or static aipp mode attribute to the Data node
  if (GetAippMode() == domi::AippOpParams::static_) {
    (void)AttrUtils::SetStr(data_opdesc, ATTR_DATA_RELATED_AIPP_MODE, "static_aipp");
  } else if (GetAippMode() == domi::AippOpParams::dynamic) {
    (void)AttrUtils::SetStr(data_opdesc, ATTR_DATA_RELATED_AIPP_MODE, "dynamic_aipp");
  }

  // In the AIPP+CONV2D+POOLING scenario, keep the aipp info on the Data node,
  // since the AIPP node disappears after subgraph optimization
  GeAttrValue::NAMED_ATTRS aipp_attr;
  ConvertParamToAttr(aipp_attr);
  if (!AttrUtils::SetNamedAttrs(data_opdesc, ATTR_NAME_AIPP, aipp_attr)) {
    GELOGE(INTERNAL_ERROR, "Set name attrs for Data node failed. id: %d", rank);
    return INTERNAL_ERROR;
  }

  if (aipp_params_->input_edge_idx_size() > 0) {
    for (auto edge_index : aipp_params_->input_edge_idx()) {
      edge_indexes.insert(edge_index);
    }
  }
  if (!edge_indexes.empty() && (*edge_indexes.rbegin() >= data_node->GetOutDataNodes().size())) {
    GELOGE(PARAM_INVALID, "input_edge_idx %u should smaller than out edge size of target input %zu",
           *edge_indexes.rbegin(), data_node->GetOutDataNodes().size());
    string errormsg = "The aipp parameter input_edge_idx should be smaller than the target input's outnodes.";
    ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {errormsg});
    return PARAM_INVALID;
  }
  target = data_node;

  return GetStaticTargetNode(graph, data_node, target);
}

Status AippOp::GetStaticTargetNode(const ComputeGraphPtr &graph, NodePtr &data_node, NodePtr &target) {
  if (GetAippMode() != domi::AippOpParams::static_) {
    return SUCCESS;
  }

  std::string related_node_name;
  if (AttrUtils::GetStr(data_node->GetOpDesc(), kMbatchSwitchnName, related_node_name)) {
    if (related_node_name.empty()) {
      GELOGE(INTERNAL_ERROR, "The data node %s has switchn node flag, but the value is empty",
             data_node->GetName().c_str());
      return INTERNAL_ERROR;
    }
    auto switchn = graph->FindNode(related_node_name);
    if (switchn == nullptr) {
      GELOGE(INTERNAL_ERROR, "The data node %s has switchn node %s, but can not find it on the graph",
             data_node->GetName().c_str(), related_node_name.c_str());
      return INTERNAL_ERROR;
    }
    target = switchn;
    GELOGI(
        "Multi-batch/image size and static aipp for data %s, "
        "the aipp node will be insert after %s instead of origin data node",
        data_node->GetName().c_str(), switchn->GetName().c_str());
    return SUCCESS;
  }

  const auto out_anchor = data_node->GetOutDataAnchor(0);
  for (const auto &in_anchor : out_anchor->GetPeerInDataAnchors()) {
    if (in_anchor == nullptr) {
      continue;
    }
    const auto &case_node = in_anchor->GetOwnerNode();
    if (case_node->GetType() == CASE) {
      target = case_node;
      return SUCCESS;
    }
  }

  return SUCCESS;
}

Status AippOp::GetTargetPosition(ComputeGraphPtr graph, NodePtr &target_input,
                                 std::vector<std::pair<OutDataAnchorPtr, InDataAnchorPtr>> &target_edges) {
  GE_CHECK_NOTNULL(graph);
  GE_CHECK_NOTNULL(aipp_params_);
  std::set<uint32_t> edge_indexes;
  const uint32_t related_input_rank = aipp_params_->related_input_rank();
  auto ret = GetAndCheckTarget(graph, related_input_rank, target_input, edge_indexes);
  if (ret != SUCCESS) {
    GELOGE(ret, "Get target input node for rank %u failed", related_input_rank);
    return ret;
  }

  target_edges.clear();
  if (target_input->GetType() != CASE) {
    for (OutDataAnchorPtr &src_out : target_input->GetAllOutDataAnchors()) {
      auto dst_ins = src_out->GetPeerInDataAnchors();
      for (uint32_t i = 0; i < dst_ins.size(); ++i) {
        auto dst_in = dst_ins.at(i);
        if (edge_indexes.empty() || edge_indexes.count(i) > 0) {
          target_edges.emplace_back(src_out, dst_in);
        }
      }
    }
  } else {
    const auto &func_desc = target_input->GetOpDesc();
    for (const auto &name : func_desc->GetSubgraphInstanceNames()) {
      const auto &subgraph = graph->GetSubgraph(name);
      if (subgraph == nullptr) {
        GELOGE(GE_GRAPH_EMPTY_SUBGRAPH, "Subgraph not found, name: %s", name.c_str());
        return GE_GRAPH_EMPTY_SUBGRAPH;
      }

      auto data_node = FindDataByIndex(subgraph, related_input_rank);
      if (data_node == nullptr) {
        GELOGE(PARAM_INVALID, "Get target input node for rank %d failed", related_input_rank);
        return PARAM_INVALID;
      }
      for (OutDataAnchorPtr &src_out : data_node->GetAllOutDataAnchors()) {
        auto dst_ins = src_out->GetPeerInDataAnchors();
        for (uint32_t i = 0; i < dst_ins.size(); ++i) {
          auto dst_in = dst_ins.at(i);
          if (edge_indexes.empty() || edge_indexes.count(i) > 0) {
            target_edges.emplace_back(src_out, dst_in);
          }
        }
      }
    }
  }

  return SUCCESS;
}

Status AippOp::SetDefaultParams() {
  GE_CHECK_NOTNULL(aipp_params_);
  const domi::AippOpParams::AippMode aipp_mode = aipp_params_->aipp_mode();
  if (aipp_mode == domi::AippOpParams::static_) {
    if (aipp_params_->csc_switch()) {
      SetCscDefaultValue();
    }

    SetDtcDefaultValue();

    GELOGI("parse aipp params:input_format:%s, csc_switch:%d.",
           domi::AippOpParams::InputFormat_Name(aipp_params_->input_format()).c_str(), aipp_params_->csc_switch());

    GELOGI("parse aipp params:mean_chn_0:%d, mean_chn_1:%d, mean_chn_2:%d, mean_chn_3:%d.", aipp_params_->mean_chn_0(),
           aipp_params_->mean_chn_1(), aipp_params_->mean_chn_2(), aipp_params_->mean_chn_3());

    GELOGI("parse aipp params:min_chn_0:%f, min_chn_1:%f, min_chn_2:%f.", aipp_params_->min_chn_0(),
           aipp_params_->min_chn_1(), aipp_params_->min_chn_2());

    GE_IF_BOOL_EXEC(!aipp_params_->crop(), aipp_params_->set_load_start_pos_h(0); aipp_params_->set_load_start_pos_w(0);
                    aipp_params_->set_crop_size_h(0); aipp_params_->set_crop_size_w(0););
    GE_IF_BOOL_EXEC(!aipp_params_->resize(), aipp_params_->set_resize_output_h(0);
                    aipp_params_->set_resize_output_w(0););
    GE_IF_BOOL_EXEC(!aipp_params_->padding(), aipp_params_->set_left_padding_size(0);
                    aipp_params_->set_right_padding_size(0); aipp_params_->set_top_padding_size(0);
                    aipp_params_->set_bottom_padding_size(0););
  }

  return SUCCESS;
}

Status AippOp::ValidateParams() {
  GE_CHECK_NOTNULL(aipp_params_);
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->aipp_mode() != domi::AippOpParams::undefined, PARAM_INVALID,
                                         "When insert AIPP op, aipp_mode must be configured as static or dynamic ");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->var_reci_chn_0_size() <= 1, PARAM_INVALID,
                                         "The parameter var_reci_chn_0 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->var_reci_chn_1_size() <= 1, PARAM_INVALID,
                                         "The parameter var_reci_chn_1 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->var_reci_chn_2_size() <= 1, PARAM_INVALID,
                                         "The parameter var_reci_chn_2 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->var_reci_chn_3_size() <= 1, PARAM_INVALID,
                                         "The parameter var_reci_chn_3 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r0c0_size() <= 1, PARAM_INVALID,
                                         "The parameter matrix_r0c0 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r0c1_size() <= 1, PARAM_INVALID,
                                         "The parameter matrix_r0c1 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r0c2_size() <= 1, PARAM_INVALID,
                                         "The parameter matrix_r0c2 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r1c0_size() <= 1, PARAM_INVALID,
                                         "The parameter matrix_r1c0 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r1c1_size() <= 1, PARAM_INVALID,
                                         "The parameter matrix_r1c1 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r1c2_size() <= 1, PARAM_INVALID,
                                         "The parameter matrix_r1c2 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r2c0_size() <= 1, PARAM_INVALID,
                                         "The parameter matrix_r2c0 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r2c1_size() <= 1, PARAM_INVALID,
                                         "The parameter matrix_r2c1 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r2c2_size() <= 1, PARAM_INVALID,
                                         "The parameter matrix_r2c2 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->output_bias_0_size() <= 1, PARAM_INVALID,
                                         "The parameter output_bias_0 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->output_bias_1_size() <= 1, PARAM_INVALID,
                                         "The parameter output_bias_1 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->output_bias_2_size() <= 1, PARAM_INVALID,
                                         "The parameter output_bias_2 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->input_bias_0_size() <= 1, PARAM_INVALID,
                                         "The parameter input_bias_0 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->input_bias_1_size() <= 1, PARAM_INVALID,
                                         "The parameter input_bias_1 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->input_bias_2_size() <= 1, PARAM_INVALID,
                                         "The parameter input_bias_2 can not be configed repeatedly");
  AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->input_edge_idx_size() <= 1, PARAM_INVALID,
                                         "The parameter input_edge_idx can not be configed repeatedly");

  const domi::AippOpParams::AippMode aipp_mode = aipp_params_->aipp_mode();
  if (aipp_mode == domi::AippOpParams::dynamic) {
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(
        aipp_params_->max_src_image_size() > 0, PARAM_INVALID,
        "For dynamic AIPP params, max_src_image_size must be set which number should be greater than 0");
  } else {
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->input_format() != domi::AippOpParams::UNDEFINED, PARAM_INVALID,
                                           "Input format of AIPP conf is undefined");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->src_image_size_w() >= 0, PARAM_INVALID,
                                           "Src_image_size_w must not be configed smaller than 0");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->src_image_size_h() >= 0, PARAM_INVALID,
                                           "Src_image_size_h must not be configed smaller than 0");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->load_start_pos_w() >= 0, PARAM_INVALID,
                                           "Load_start_pos_w must not be configed smaller than 0");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->load_start_pos_h() >= 0, PARAM_INVALID,
                                           "Load_start_pos_h must not be configed smaller than 0");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->crop_size_w() >= 0, PARAM_INVALID,
                                           "Crop_size_w must not be configed smaller than 0");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->resize_output_w() >= 0, PARAM_INVALID,
                                           "Resize_output_w must not be configed smaller than 0");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->resize_output_h() >= 0, PARAM_INVALID,
                                           "Resize_output_h must not be configed smaller than 0");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->left_padding_size() >= 0, PARAM_INVALID,
                                           "Left_padding_size must not be configed smaller than 0");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->right_padding_size() >= 0, PARAM_INVALID,
                                           "Right_padding_size must not be configed smaller than 0");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->top_padding_size() >= 0, PARAM_INVALID,
                                           "Top_padding_size must not be configed smaller than 0");
    AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->bottom_padding_size() >= 0, PARAM_INVALID,
                                           "Bottom_padding_size must not be configed smaller than 0");
  }

  return SUCCESS;
}

void AippOp::SetCscDefaultValue() {
  GE_CHECK_NOTNULL_JUST_RETURN(aipp_params_);
  if (aipp_params_->input_format() == domi::AippOpParams::YUV420SP_U8) {
    CHECK_FALSE_EXEC(aipp_params_->matrix_r0c0_size() > 0, aipp_params_->add_matrix_r0c0(DEFAULT_MATRIX_R2C0_YUV2RGB));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r0c1_size() > 0, aipp_params_->add_matrix_r0c1(DEFAULT_MATRIX_R2C1_YUV2RGB));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r0c2_size() > 0, aipp_params_->add_matrix_r0c2(DEFAULT_MATRIX_R2C2_YUV2RGB));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r1c0_size() > 0, aipp_params_->add_matrix_r1c0(DEFAULT_MATRIX_R1C0_YUV2RGB));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r1c1_size() > 0, aipp_params_->add_matrix_r1c1(DEFAULT_MATRIX_R1C1_YUV2RGB));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r1c2_size() > 0, aipp_params_->add_matrix_r1c2(DEFAULT_MATRIX_R1C2_YUV2RGB));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r2c0_size() > 0, aipp_params_->add_matrix_r2c0(DEFAULT_MATRIX_R0C0_YUV2RGB));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r2c1_size() > 0, aipp_params_->add_matrix_r2c1(DEFAULT_MATRIX_R0C1_YUV2RGB));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r2c2_size() > 0, aipp_params_->add_matrix_r2c2(DEFAULT_MATRIX_R0C2_YUV2RGB));
  } else {
    CHECK_FALSE_EXEC(aipp_params_->matrix_r0c0_size() > 0, aipp_params_->add_matrix_r0c0(DEFAULT_MATRIX_R0C0_RGB2YUV));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r0c1_size() > 0, aipp_params_->add_matrix_r0c1(DEFAULT_MATRIX_R0C1_RGB2YUV));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r0c2_size() > 0, aipp_params_->add_matrix_r0c2(DEFAULT_MATRIX_R0C2_RGB2YUV));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r1c0_size() > 0, aipp_params_->add_matrix_r1c0(DEFAULT_MATRIX_R1C0_RGB2YUV));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r1c1_size() > 0, aipp_params_->add_matrix_r1c1(DEFAULT_MATRIX_R1C1_RGB2YUV));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r1c2_size() > 0, aipp_params_->add_matrix_r1c2(DEFAULT_MATRIX_R1C2_RGB2YUV));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r2c0_size() > 0, aipp_params_->add_matrix_r2c0(DEFAULT_MATRIX_R2C0_RGB2YUV));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r2c1_size() > 0, aipp_params_->add_matrix_r2c1(DEFAULT_MATRIX_R2C1_RGB2YUV));
    CHECK_FALSE_EXEC(aipp_params_->matrix_r2c2_size() > 0, aipp_params_->add_matrix_r2c2(DEFAULT_MATRIX_R2C2_RGB2YUV));
  }
  CHECK_FALSE_EXEC(aipp_params_->input_bias_0_size() > 0, aipp_params_->add_input_bias_0(DEFAULT_INPUT_BIAS_0));
  CHECK_FALSE_EXEC(aipp_params_->input_bias_1_size() > 0, aipp_params_->add_input_bias_1(DEFAULT_INPUT_BIAS_1));
  CHECK_FALSE_EXEC(aipp_params_->input_bias_2_size() > 0, aipp_params_->add_input_bias_2(DEFAULT_INPUT_BIAS_2));
  CHECK_FALSE_EXEC(aipp_params_->output_bias_0_size() > 0, aipp_params_->add_output_bias_0(DEFAULT_OUTPUT_BIAS_0));
  CHECK_FALSE_EXEC(aipp_params_->output_bias_1_size() > 0, aipp_params_->add_output_bias_1(DEFAULT_OUTPUT_BIAS_1));
  CHECK_FALSE_EXEC(aipp_params_->output_bias_2_size() > 0, aipp_params_->add_output_bias_2(DEFAULT_OUTPUT_BIAS_2));
}

void AippOp::SetDtcDefaultValue() {
  GE_CHECK_NOTNULL_JUST_RETURN(aipp_params_);
  CHECK_FALSE_EXEC(aipp_params_->var_reci_chn_0_size() > 0, aipp_params_->add_var_reci_chn_0(DEFAULT_VAR_RECI_CHN));
  GELOGD("var_reci_chn_0 is %f, size is %u.", DEFAULT_VAR_RECI_CHN, aipp_params_->var_reci_chn_0_size());
  CHECK_FALSE_EXEC(aipp_params_->var_reci_chn_1_size() > 0, aipp_params_->add_var_reci_chn_1(DEFAULT_VAR_RECI_CHN));
  GELOGD("var_reci_chn_1 is %f, size is %u.", DEFAULT_VAR_RECI_CHN, aipp_params_->var_reci_chn_1_size());
  CHECK_FALSE_EXEC(aipp_params_->var_reci_chn_2_size() > 0, aipp_params_->add_var_reci_chn_2(DEFAULT_VAR_RECI_CHN));
  GELOGD("var_reci_chn_2 is %f, size is %u.", DEFAULT_VAR_RECI_CHN, aipp_params_->var_reci_chn_2_size());
  CHECK_FALSE_EXEC(aipp_params_->var_reci_chn_3_size() > 0, aipp_params_->add_var_reci_chn_3(DEFAULT_VAR_RECI_CHN));
  GELOGD("var_reci_chn_3 is %f, size is %u.", DEFAULT_VAR_RECI_CHN, aipp_params_->var_reci_chn_3_size());
}

Status AippOp::GenerateOpDesc(OpDescPtr op_desc) {
  GE_CHECK_NOTNULL(op_desc);
  static std::atomic_long atomic_op_idx(0);
  auto op_idx = atomic_op_idx.fetch_add(1);
  op_desc->SetName(std::string("aipp_node").append(std::to_string(op_idx)));
  op_desc->SetType(AIPP);

  // Add two InputDescs; add the second only after the first one is added successfully.
  if ((op_desc->AddInputDesc(GeTensorDesc()) != GRAPH_SUCCESS) ||
      (op_desc->AddInputDesc(GeTensorDesc()) != GRAPH_SUCCESS)) {
    GELOGE(FAILED, "failed to add input desc");
    return FAILED;
  }

  if (op_desc->AddOutputDesc(GeTensorDesc()) != GRAPH_SUCCESS) {
    GELOGE(FAILED, "add output desc failed.");
    return FAILED;
  }

  GeAttrValue::NAMED_ATTRS aipp_attrs;
  ConvertParamToAttr(aipp_attrs);
  GE_IF_BOOL_EXEC(!AttrUtils::SetNamedAttrs(op_desc, ATTR_NAME_AIPP, aipp_attrs),
                  GELOGE(FAILED, "failed to set ATTR_NAME_AIPP");
                  return FAILED);

  return SUCCESS;
}

void AippOp::ConvertParamToAttr(GeAttrValue::NAMED_ATTRS &aipp_attrs) {
  GE_CHECK_NOTNULL_JUST_RETURN(aipp_params_);
  SAVE_AIPP_ATTR(aipp_mode, GeAttrValue::INT);
  SAVE_AIPP_ATTR(related_input_rank, GeAttrValue::INT);

  if (aipp_params_->aipp_mode() == domi::AippOpParams::static_) {
    SAVE_AIPP_ATTR(input_format, GeAttrValue::INT);
    SAVE_AIPP_ATTR(csc_switch, GeAttrValue::BOOL);
    SAVE_AIPP_ATTR(crop, GeAttrValue::BOOL);
    SAVE_AIPP_ATTR(resize, GeAttrValue::BOOL);
    SAVE_AIPP_ATTR(load_start_pos_w, GeAttrValue::INT);
    SAVE_AIPP_ATTR(load_start_pos_h, GeAttrValue::INT);
    SAVE_AIPP_ATTR(crop_size_w, GeAttrValue::INT);
    SAVE_AIPP_ATTR(crop_size_h, GeAttrValue::INT);
    SAVE_AIPP_ATTR(resize, GeAttrValue::BOOL);
    SAVE_AIPP_ATTR(resize_output_w, GeAttrValue::INT);
    SAVE_AIPP_ATTR(resize_output_h, GeAttrValue::INT);
    SAVE_AIPP_ATTR(padding, GeAttrValue::BOOL);
    SAVE_AIPP_ATTR(left_padding_size, GeAttrValue::INT);
    SAVE_AIPP_ATTR(right_padding_size, GeAttrValue::INT);
    SAVE_AIPP_ATTR(top_padding_size, GeAttrValue::INT);
    SAVE_AIPP_ATTR(bottom_padding_size, GeAttrValue::INT);
    SAVE_AIPP_ATTR(src_image_size_w, GeAttrValue::INT);
    SAVE_AIPP_ATTR(src_image_size_h, GeAttrValue::INT);
    SAVE_AIPP_ATTR(cpadding_value, GeAttrValue::FLOAT);
    SAVE_AIPP_ATTR(rbuv_swap_switch, GeAttrValue::BOOL);
    SAVE_AIPP_ATTR(ax_swap_switch, GeAttrValue::BOOL);
    SAVE_AIPP_ATTR(single_line_mode, GeAttrValue::BOOL);
    SAVE_AIPP_ATTR(mean_chn_0, GeAttrValue::INT);
    SAVE_AIPP_ATTR(mean_chn_1, GeAttrValue::INT);
    SAVE_AIPP_ATTR(mean_chn_2, GeAttrValue::INT);
    SAVE_AIPP_ATTR(mean_chn_3, GeAttrValue::INT);
    SAVE_AIPP_ATTR(min_chn_0, GeAttrValue::FLOAT);
    SAVE_AIPP_ATTR(min_chn_1, GeAttrValue::FLOAT);
    SAVE_AIPP_ATTR(min_chn_2, GeAttrValue::FLOAT);
    SAVE_AIPP_ATTR(min_chn_3, GeAttrValue::FLOAT);
    SAVE_AIPP_ATTR_LIST(var_reci_chn_0, GeAttrValue::FLOAT);
    SAVE_AIPP_ATTR_LIST(var_reci_chn_1, GeAttrValue::FLOAT);
    SAVE_AIPP_ATTR_LIST(var_reci_chn_2, GeAttrValue::FLOAT);
    SAVE_AIPP_ATTR_LIST(var_reci_chn_3, GeAttrValue::FLOAT);
    SAVE_AIPP_ATTR_LIST(matrix_r0c0, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(matrix_r0c1, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(matrix_r0c2, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(matrix_r1c0, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(matrix_r1c1, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(matrix_r1c2, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(matrix_r2c0, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(matrix_r2c1, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(matrix_r2c2, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(output_bias_0, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(output_bias_1, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(output_bias_2, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(input_bias_0, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(input_bias_1, GeAttrValue::INT);
    SAVE_AIPP_ATTR_LIST(input_bias_2, GeAttrValue::INT);
  } else {
    SAVE_AIPP_ATTR(max_src_image_size, GeAttrValue::INT);
    SAVE_AIPP_ATTR(support_rotation, GeAttrValue::BOOL);
  }
}

Status AippOp::CreateAippData(const NodePtr &aipp_node) {
  GELOGD("Enter add aipp data node process.");
  // get previous node, it should be DATA
  auto data_node = aipp_node->GetInDataNodes().at(kAippImageInputIndex);
  auto data_op_desc = data_node->GetOpDesc();
  GE_CHECK_NOTNULL(data_op_desc);

  auto ori_data_format = GetAndCheckFormat();
  if (ori_data_format != FORMAT_NCHW && ori_data_format != FORMAT_NHWC) {
    string format_str = TypeUtils::FormatToSerialString(ori_data_format);
    GELOGE(PARAM_INVALID, "when dynamic aipp, input_format must be NCHW or NHWC, but [%s] format is %s",
           data_node->GetName().c_str(), format_str.c_str());
    string reason = "format must be NCHW or NHWC in dynamic aipp process";
    ErrorManager::GetInstance().ATCReportErrMessage("E19014", {"opname", "value", "reason"},
                                                    {data_node->GetName(), "format " + format_str, reason});
    return PARAM_INVALID;
  }

  // for dynamic aipp the H/W/C dims are not fixed, so they need to be set to -1
  int64_t data_shape_n = 0;
  // for dynamic batch or HW, N needs to be acquired from ATTR_MBATCH_ORIGIN_INPUT_DIMS
  if (data_op_desc->HasAttr(ATTR_MBATCH_ORIGIN_INPUT_DIMS)) {
    vector<int64_t> origin_input_dims;
    (void)AttrUtils::GetListInt(data_op_desc, ATTR_MBATCH_ORIGIN_INPUT_DIMS, origin_input_dims);
    if (!origin_input_dims.empty()) {
      data_shape_n = origin_input_dims[0];
    }
  } else {
    data_shape_n = data_op_desc->MutableInputDesc(0)->GetShape().GetDim(0);
  }
  vector<int64_t> dynamic_aipp_linked_data_shape{data_shape_n, kDynamicDim, kDynamicDim, kDynamicDim};
  (void)AttrUtils::SetListInt(data_op_desc, ATTR_DYNAMIC_AIPP_INPUT_DIMS, dynamic_aipp_linked_data_shape);

  int64_t batch_count = -1;
  if (GetDataDimN(data_node, ori_data_format, batch_count) != ge::SUCCESS) {
    GELOGE(PARAM_INVALID, "Get data_node dims and transfer to nchw_dims failed!");
    return PARAM_INVALID;
  }
  if (batch_count <= 0) {
    GELOGE(PARAM_INVALID, "Batch count %ld is invalid", batch_count);
    return PARAM_INVALID;
  }

  int64_t max_dynamic_aipp_size = CalcMaxSize(batch_count);
  if (max_dynamic_aipp_size < 0) {
    GELOGE(PARAM_INVALID, "The dynamic aipp size is not positive.");
    return PARAM_INVALID;
  }

  GELOGI("Add aipp input data, batch count is %ld, max_dynamic_aipp_size is %ld", batch_count, max_dynamic_aipp_size);
  return AddNodeToGraph(aipp_node, max_dynamic_aipp_size);
}

Status AippOp::AddAttrToAippData(const OpDescPtr &aipp_data_op_desc) {
  // Add dynamic aipp config to aipp_data
  GeAttrValue::NAMED_ATTRS aipp_attr;
  ConvertParamToAttr(aipp_attr);
  (void)AttrUtils::SetNamedAttrs(aipp_data_op_desc, ATTR_NAME_AIPP, aipp_attr);
  (void)AttrUtils::SetStr(aipp_data_op_desc, ATTR_DATA_RELATED_AIPP_MODE, "dynamic_aipp_conf");

  // add the node name attr to the Data node linked with aipp_data, so it can be queried by ACL
  GE_CHECK_NOTNULL(data_node_linked_aipp);
  auto data_op_desc = data_node_linked_aipp->GetOpDesc();
  GE_CHECK_NOTNULL(data_op_desc);
  (void)AttrUtils::SetStr(data_op_desc, ATTR_DATA_AIPP_DATA_NAME_MAP, aipp_data_op_desc->GetName());
  (void)AttrUtils::SetStr(aipp_data_op_desc, ATTR_DATA_AIPP_DATA_NAME_MAP, data_op_desc->GetName());
  return SUCCESS;
}

Status AippOp::AddNodeToGraph(const NodePtr &aipp_node, int64_t max_dynamic_aipp_size) {
  static int index = 0;
  std::vector<int64_t> input_shape_dim(1, max_dynamic_aipp_size);
  GeShape input_shape(input_shape_dim);
  // construct input tensor
  GeTensorDesc input_tensor(input_shape, FORMAT_ND, DT_UINT8);
  TensorUtils::SetReuseInput(input_tensor, false);
  TensorUtils::SetSize(input_tensor, max_dynamic_aipp_size);

  const ComputeGraphPtr &graph = aipp_node->GetOwnerComputeGraph();
  string node_name;
  if (index == 0) {
    node_name = kDynamicAippData;
  } else {
    node_name = string(kDynamicAippData) + "_" + to_string(index);
  }
  ++index;

  // newly added aipp_data op for the dynamic aipp param input
  OpDescPtr op_desc_ptr_data = MakeShared<OpDesc>(node_name, AIPPDATA);
  GE_CHECK_NOTNULL(op_desc_ptr_data);
  if (AddAttrToAippData(op_desc_ptr_data) != SUCCESS) {
    return INTERNAL_ERROR;
  }

  auto stat1 = op_desc_ptr_data->AddInputDesc(input_tensor);

  GeShape output_shape(input_shape_dim);
  // construct output tensor
  GeTensorDesc output_tensor(output_shape, FORMAT_ND, DT_UINT8);
  TensorUtils::SetReuseInput(output_tensor, false);
  TensorUtils::SetSize(output_tensor, max_dynamic_aipp_size);
  auto stat2 = op_desc_ptr_data->AddOutputDesc(output_tensor);

  NodePtr aipp_data_node_ptr = graph->AddNode(op_desc_ptr_data);
  GE_CHECK_NOTNULL(aipp_data_node_ptr);

  // add node desc for aipp node
  auto stat3 = aipp_node->GetOpDesc()->UpdateInputDesc(kAippParamsInputIndex, output_tensor);
  if (stat1 != GRAPH_SUCCESS || stat2 != GRAPH_SUCCESS || stat3 != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "node process desc failed!");
    return INTERNAL_ERROR;
  }

  // aipp_node should have two data inputs, but currently only one input is linked
  if (GraphUtils::AddEdge(aipp_data_node_ptr->GetOutDataAnchor(kAippDataOutputIndex),
                          aipp_node->GetInDataAnchor(kAippParamsInputIndex)) != GRAPH_SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Add Anchor anchor between aipp data node and aipp failed!");
    return INTERNAL_ERROR;
  }
  return SUCCESS;
}
}  // namespace ge
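For orientation, the sketch below shows how the insertion flow defined in this file might be driven by a caller, using only the methods visible above (Init, ValidateParams, SetDefaultParams, InsertAippToGraph). It is a minimal illustration, not part of ge_aipp_op.cc: the wrapper name InsertOneAipp, its arguments, and the assumption that AippOp is default-constructible are all hypothetical and supplied for the example only.

// Hedged usage sketch (illustrative, not from ge_aipp_op.cc).
// Assumes the caller has already parsed the --insert_op_conf file into `params`
// and owns the compute graph; `config_path` and `index` come from the caller.
ge::Status InsertOneAipp(ge::ComputeGraphPtr &graph, domi::AippOpParams *params,
                         std::string &config_path, uint32_t index) {
  ge::AippOp aipp_op;  // assumed default-constructible here
  GE_CHK_STATUS_RET(aipp_op.Init(params), "Copy aipp params failed");               // deep-copies *params
  GE_CHK_STATUS_RET(aipp_op.ValidateParams(), "Validate aipp params failed");       // rejects repeated/illegal fields
  GE_CHK_STATUS_RET(aipp_op.SetDefaultParams(), "Set default aipp params failed");  // fills CSC/DTC defaults for static mode
  // Creates the Aipp node(s), relinks the Data edges, and for dynamic mode also adds an AippData input node.
  return aipp_op.InsertAippToGraph(graph, config_path, index);
}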

The Graph Engine (GE) module is a submodule of MindSpore implemented in C++. It sits between the front-end module ME and the underlying hardware and acts as the bridge between them. GE takes the graph delivered by ME as input, applies a series of deep graph optimizations, and finally outputs a graph that can run efficiently on the underlying hardware. GE performs optimizations tailored to the hardware architecture of the Ascend AI processor so as to fully exploit its compute power. During model training/inference, GE is invoked automatically and is transparent to the user. GE consists mainly of two parts, GE API and GE Core; the detailed architecture diagram is shown below.