You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

ge_tensor.cc 37 kB

5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021
  1. /**
  2. * Copyright 2019-2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "graph/ge_tensor.h"
  17. #include <cstdlib>
  18. #include <cstring>
  19. #include <iostream>
  20. #include <map>
  21. #include "debug/ge_attr_define.h"
  22. #include "debug/ge_util.h"
  23. #include "framework/common/debug/ge_log.h"
  24. #include "graph/ge_attr_value.h"
  25. #include "graph/model_serialize.h"
  26. #include "proto/ge_ir.pb.h"
  27. #include "utils/attr_utils.h"
  28. #include "utils/ge_ir_utils.h"
  29. #include "utils/tensor_utils.h"
  30. #include "utils/type_utils.h"
namespace ge {
// Attribute-map key under which data types that cannot be expressed as a
// proto::DataType enum value are serialized (see kDataTypeSelfDefinedMap).
static const char *const kKeyDataTypeSelfDefined = "__tensor_desc_data_type__";
// Mapping from ge::DataType to the serialized proto::DataType enum value.
// Used by SetDataType (forward) and GetDataType (reverse scan).
static const std::map<DataType, ::ge::proto::DataType> kDataTypeMap = {
    {DT_UNDEFINED, proto::DT_UNDEFINED},
    {DT_FLOAT, proto::DT_FLOAT},
    {DT_FLOAT16, proto::DT_FLOAT16},
    {DT_INT8, proto::DT_INT8},
    {DT_UINT8, proto::DT_UINT8},
    {DT_INT16, proto::DT_INT16},
    {DT_UINT16, proto::DT_UINT16},
    {DT_INT32, proto::DT_INT32},
    {DT_INT64, proto::DT_INT64},
    {DT_UINT32, proto::DT_UINT32},
    {DT_UINT64, proto::DT_UINT64},
    {DT_BOOL, proto::DT_BOOL},
    {DT_DOUBLE, proto::DT_DOUBLE},
    {DT_DUAL, proto::DT_DUAL},
    {DT_DUAL_SUB_INT8, proto::DT_DUAL_SUB_INT8},
    {DT_DUAL_SUB_UINT8, proto::DT_DUAL_SUB_UINT8},
    {DT_COMPLEX64, proto::DT_COMPLEX64},
    {DT_COMPLEX128, proto::DT_COMPLEX128},
    {DT_QINT8, proto::DT_QINT8},
    {DT_QINT16, proto::DT_QINT16},
    {DT_QINT32, proto::DT_QINT32},
    {DT_QUINT8, proto::DT_QUINT8},
    {DT_QUINT16, proto::DT_QUINT16},
    {DT_RESOURCE, proto::DT_RESOURCE},
    {DT_STRING_REF, proto::DT_STRING_REF},
    {DT_STRING, proto::DT_STRING},
};
// Integer codes for data types stored via the self-defined attribute key.
// NOTE(review): every entry here also appears in kDataTypeMap, and
// SetDataType consults kDataTypeMap first, so this table is only reachable
// on the read path (GetDataType) — presumably for backward compatibility
// with older serialized models; confirm against the serializer history.
static const std::map<DataType, int> kDataTypeSelfDefinedMap = {
    {DT_DUAL, 13},  {DT_DUAL_SUB_INT8, 14}, {DT_DUAL_SUB_UINT8, 15}, {DT_COMPLEX64, 16}, {DT_COMPLEX128, 17},
    {DT_QINT8, 18}, {DT_QINT16, 19},        {DT_QINT32, 20},         {DT_QUINT8, 21},    {DT_QUINT16, 22},
};
  65. GeShape::GeShape() { shape_def_.InitDefault(); }
  66. // Default
  67. GeShape::GeShape(std::vector<int64_t> s) : GeShape() {
  68. auto proto_msg = shape_def_.GetProtoMsg();
  69. if (proto_msg != nullptr) {
  70. for (auto i : s) {
  71. proto_msg->add_dim(i);
  72. }
  73. }
  74. }
  75. size_t GeShape::GetDimNum() const {
  76. auto proto_msg = shape_def_.GetProtoMsg();
  77. if (proto_msg != nullptr) {
  78. if (proto_msg->dim_size() >= 0) {
  79. // check whether contain -2, if true, return -1
  80. for (auto i : proto_msg->dim()) {
  81. if (i == UNKNOWN_DIM_NUM) {
  82. return 0;
  83. }
  84. }
  85. return proto_msg->dim_size();
  86. } else {
  87. return 0;
  88. }
  89. }
  90. return 0;
  91. }
  92. int64_t GeShape::GetDim(size_t idx) const {
  93. auto proto_msg = shape_def_.GetProtoMsg();
  94. if (proto_msg != nullptr) {
  95. if (proto_msg->dim_size() > static_cast<int>(idx)) {
  96. return proto_msg->dim(static_cast<int>(idx));
  97. }
  98. }
  99. return 0;
  100. }
  101. graphStatus GeShape::SetDim(size_t idx, int64_t value) {
  102. auto proto_msg = shape_def_.GetProtoMsg();
  103. if (proto_msg != nullptr) {
  104. auto dims = proto_msg->mutable_dim();
  105. GE_CHECK_NOTNULL(dims);
  106. if (dims->empty()) {
  107. GELOGE(GRAPH_FAILED, "shape is empty");
  108. return GRAPH_FAILED;
  109. }
  110. if (static_cast<int>(idx) >= dims->size()) {
  111. GELOGE(GRAPH_FAILED, "idx is out of range");
  112. return GRAPH_FAILED;
  113. }
  114. proto_msg->set_dim(static_cast<int>(idx), value);
  115. }
  116. return GRAPH_SUCCESS;
  117. }
  118. std::vector<int64_t> GeShape::GetDims() const {
  119. vector<int64_t> dims;
  120. auto proto_msg = shape_def_.GetProtoMsg();
  121. if (proto_msg != nullptr) {
  122. for (auto i : proto_msg->dim()) {
  123. dims.push_back(i);
  124. }
  125. }
  126. return dims;
  127. }
  128. std::string GeShape::ToString() const {
  129. auto proto_msg = shape_def_.GetProtoMsg();
  130. if (proto_msg == nullptr) {
  131. return "";
  132. }
  133. std::stringstream ss;
  134. bool first = true;
  135. for (auto i : proto_msg->dim()) {
  136. if (first) {
  137. first = false;
  138. } else {
  139. ss << ",";
  140. }
  141. ss << i;
  142. }
  143. return ss.str();
  144. }
  145. int64_t GeShape::GetShapeSize() const {
  146. int64_t res = 1;
  147. auto proto_msg = shape_def_.GetProtoMsg();
  148. if (proto_msg != nullptr) {
  149. if (proto_msg->dim().empty()) {
  150. return 0;
  151. }
  152. for (auto i : proto_msg->dim()) {
  153. // if unknown shape, return -1
  154. if (i == UNKNOWN_DIM || i == UNKNOWN_DIM_NUM) {
  155. return UNKNOWN_DIM;
  156. }
  157. res *= i;
  158. }
  159. }
  160. return res;
  161. }
  162. ///
  163. /// @brief Check is unknown shape
  164. /// @return bool
  165. /// ///
  166. bool GeShape::IsUnknownShape() const {
  167. auto proto_msg = shape_def_.GetProtoMsg();
  168. if (proto_msg != nullptr) {
  169. for (auto i : proto_msg->dim()) {
  170. if (i < 0) {
  171. return true;
  172. }
  173. }
  174. }
  175. return false;
  176. }
  177. ///
  178. /// @brief Check is a scalar
  179. /// @return bool
  180. ///
  181. bool GeShape::IsScalar() const {
  182. auto proto_msg = shape_def_.GetProtoMsg();
  183. if (proto_msg != nullptr) {
  184. return proto_msg->dim().empty();
  185. }
  186. return false;
  187. }
// Attribute-map keys used by GeTensorDesc / TensorUtils to persist tensor
// metadata alongside the core TensorDescriptor proto fields. The first few
// mirror first-class proto fields and are migrated by the
// GeTensorDesc(proto_owner, proto_msg) constructor for legacy messages.
const string TENSOR_UTILS_SIZE = "size";
const string TENSOR_UTILS_WEIGHT_SIZE = "weight_size";
const string TENSOR_UTILS_REUSE_INPUT = "reuse_input";
const string TENSOR_UTILS_OUTPUT_TENSOR = "output_tensor";
const string TENSOR_UTILS_DEVICE_TYPE = "device_type";
const string TENSOR_UTILS_INPUT_TENSOR = "input_tensor";
const string TENSOR_UTILS_REAL_DIM_CNT = "real_dim_cnt";
const string TENSOR_UTILS_REUSE_INPUT_INDEX = "reuse_input_index";
const string TENSOR_UTILS_DATA_OFFSET = "data_offset";
const string TENSOR_UTILS_CMPS_SIZE = "cmps_size";
const string TENSOR_UTILS_CMPS_TAB = "cmps_tab";
const string TENSOR_UTILS_CMPS_TAB_OFFSET = "cmps_tab_offset";
const string TENSOR_UTILS_CMPSINFO = "cmps_info";
const string TENSOR_UTILS_ALLOFFSET_QUANTIZE_INFO = "alloffset_quantize_info";
const string TENSOR_UTILS_RC = "rc";
const string TENSOR_UTILS_ORIGIN_SHAPE = "origin_shape";
const string TENSOR_UTILS_ORIGIN_FORMAT = "origin_format";
const string TENSOR_UTILS_ORIGIN_DATA_TYPE = "origin_data_type";
const string TENSOR_UTILS_SHAPE_RANGE = "shape_range";
const string TENSOR_UTILS_REF_PORT_INDEX = "ref_port_index";
// Wrap an existing proto::ShapeDef without copying; `proto_owner` keeps the
// underlying message alive for this GeShape's lifetime.
GeShape::GeShape(const ProtoMsgOwner &proto_owner, proto::ShapeDef *proto_msg) : shape_def_(proto_owner, proto_msg) {}
// Copy constructor: copies the other shape's ShapeDef value.
GeShape::GeShape(const GeShape &other) : GeShape() { shape_def_.CopyValueFrom(other.shape_def_); }
// Move constructor: takes over the other shape's ShapeDef.
GeShape::GeShape(GeShape &&other) : GeShape() { shape_def_.MoveValueFrom(std::move(other.shape_def_)); }
// Copy assignment.
GeShape &GeShape::operator=(const GeShape &other) {
  if (&other != this) {
    shape_def_.CopyValueFrom(other.shape_def_);
  }
  return *this;
}
// Move assignment.
// NOTE(review): this calls CopyValueFrom on an rvalue, whereas the move
// constructor uses MoveValueFrom — confirm whether MoveValueFrom was
// intended here too, or whether CopyValueFrom has an rvalue overload.
GeShape &GeShape::operator=(GeShape &&other) {
  if (&other != this) {
    shape_def_.CopyValueFrom(std::move(other.shape_def_));
  }
  return *this;
}
// Default-construct a descriptor with DT_FLOAT data type and ND format
// (Init() also sets the device type and the has_out_attr flag).
GeTensorDesc::GeTensorDesc() {
  tensor_descriptor_.InitDefault();
  SetDataType(DT_FLOAT);
  Init();
}
// Construct with explicit shape, format and data type.
GeTensorDesc::GeTensorDesc(GeShape shape, Format format, DataType dt) : GeTensorDesc() {
  SetFormat(format);
  SetDataType(dt);
  ShapeReference() = std::move(shape);
}
// Copy constructor: copies the other descriptor's proto value.
GeTensorDesc::GeTensorDesc(const GeTensorDesc &desc) : GeTensorDesc() {
  tensor_descriptor_.CopyValueFrom(desc.tensor_descriptor_);
}
// Move constructor: takes over the other descriptor's proto value.
GeTensorDesc::GeTensorDesc(GeTensorDesc &&desc) : GeTensorDesc() {
  tensor_descriptor_.MoveValueFrom(std::move(desc.tensor_descriptor_));
}
// Wrap an existing proto::TensorDescriptor. For legacy messages that predate
// the first-class "out attr" fields (has_out_attr == false), migrate each
// value from the attribute map into its dedicated proto field, falling back
// to the defaults initialized below when an attribute is absent. The order
// is: read attr (best effort, errors ignored), then write the proto field.
GeTensorDesc::GeTensorDesc(const ProtoMsgOwner &proto_owner, proto::TensorDescriptor *proto_msg)
    : tensor_descriptor_(proto_owner, proto_msg) {
  if (proto_msg != nullptr && !proto_msg->has_out_attr()) {
    proto_msg->set_has_out_attr(true);
    int64_t size = 0;
    (void)AttrUtils::GetInt(this, TENSOR_UTILS_SIZE, size);
    proto_msg->set_size(size);
    int64_t weight_size = 0;
    (void)AttrUtils::GetInt(this, TENSOR_UTILS_WEIGHT_SIZE, weight_size);
    proto_msg->set_weight_size(weight_size);
    bool reuse_input = false;
    (void)AttrUtils::GetBool(this, TENSOR_UTILS_REUSE_INPUT, reuse_input);
    proto_msg->set_reuse_input(reuse_input);
    bool output_tensor = false;
    (void)AttrUtils::GetBool(this, TENSOR_UTILS_OUTPUT_TENSOR, output_tensor);
    proto_msg->set_output_tensor(output_tensor);
    // Device type defaults to "NPU" when the attribute is missing.
    string device_type = "NPU";
    (void)AttrUtils::GetStr(this, TENSOR_UTILS_DEVICE_TYPE, device_type);
    proto_msg->set_device_type(device_type);
    bool input_tensor = false;
    (void)AttrUtils::GetBool(this, TENSOR_UTILS_INPUT_TENSOR, input_tensor);
    proto_msg->set_input_tensor(input_tensor);
    int64_t real_dim_cnt = 0;
    (void)AttrUtils::GetInt(this, TENSOR_UTILS_REAL_DIM_CNT, real_dim_cnt);
    proto_msg->set_real_dim_cnt(real_dim_cnt);
    int64_t reuse_input_index = 0;
    (void)AttrUtils::GetInt(this, TENSOR_UTILS_REUSE_INPUT_INDEX, reuse_input_index);
    proto_msg->set_reuse_input_index(reuse_input_index);
    int64_t data_offset = 0;
    (void)AttrUtils::GetInt(this, TENSOR_UTILS_DATA_OFFSET, data_offset);
    proto_msg->set_data_offset(data_offset);
    int64_t cmps_size = 0;
    (void)AttrUtils::GetInt(this, TENSOR_UTILS_CMPS_SIZE, cmps_size);
    proto_msg->set_cmps_size(cmps_size);
    string cmps_tab;
    (void)AttrUtils::GetStr(this, TENSOR_UTILS_CMPS_TAB, cmps_tab);
    proto_msg->set_cmps_tab(cmps_tab);
    int64_t cmps_tab_offset = 0;
    (void)AttrUtils::GetInt(this, TENSOR_UTILS_CMPS_TAB_OFFSET, cmps_tab_offset);
    proto_msg->set_cmps_tab_offset(cmps_tab_offset);
  }
}
// Field-by-field comparison of the underlying proto::TensorDescriptor
// messages. Two descriptors compare equal when every listed proto field
// matches, or when both proto messages are absent. IsEqual presumably also
// logs the first mismatching field name — confirm in ge_ir_utils.
bool GeTensorDesc::GeTensorDescAttrsAreEqual(const GeTensorDesc &r_ge_tensor_desc) const {
  const auto &tensor_descriptor = this->tensor_descriptor_.GetProtoMsg();
  const auto &r_tensor_descriptor = r_ge_tensor_desc.tensor_descriptor_.GetProtoMsg();
  if ((tensor_descriptor != nullptr) && (r_tensor_descriptor != nullptr)) {
    // Message TensorDescriptor in ge_ir.proto
    return (
      IsEqual(tensor_descriptor->name(), r_tensor_descriptor->name(), "TensorDescriptor.name()") &&
      IsEqual(tensor_descriptor->dtype(), r_tensor_descriptor->dtype(), "TensorDescriptor.dtype()") &&
      // Message ShapeDef in ge_ir.proto: compare the serialized dim lists.
      IsEqual(ToString(tensor_descriptor->shape().dim()), ToString(r_tensor_descriptor->shape().dim()),
              "TensorDescriptor.shape().dim()") &&
      IsEqual(tensor_descriptor->layout(), r_tensor_descriptor->layout(), "TensorDescriptor.layout()") &&
      IsEqual(tensor_descriptor->has_out_attr(), r_tensor_descriptor->has_out_attr(),
              "TensorDescriptor.has_out_attr()") &&
      IsEqual(tensor_descriptor->size(), r_tensor_descriptor->size(), "TensorDescriptor.size()") &&
      IsEqual(tensor_descriptor->weight_size(), r_tensor_descriptor->weight_size(), "TensorDescriptor.weight_size()") &&
      IsEqual(tensor_descriptor->reuse_input(), r_tensor_descriptor->reuse_input(), "TensorDescriptor.reuse_input()") &&
      IsEqual(tensor_descriptor->output_tensor(), r_tensor_descriptor->output_tensor(),
              "TensorDescriptor.output_tensor()") &&
      IsEqual(tensor_descriptor->device_type(), r_tensor_descriptor->device_type(), "TensorDescriptor.device_type()") &&
      IsEqual(tensor_descriptor->input_tensor(), r_tensor_descriptor->input_tensor(),
              "TensorDescriptor.input_tensor()") &&
      IsEqual(tensor_descriptor->real_dim_cnt(), r_tensor_descriptor->real_dim_cnt(),
              "TensorDescriptor.real_dim_cnt()") &&
      IsEqual(tensor_descriptor->reuse_input_index(), r_tensor_descriptor->reuse_input_index(),
              "TensorDescriptor.reuse_input_index()") &&
      IsEqual(tensor_descriptor->data_offset(), r_tensor_descriptor->data_offset(), "TensorDescriptor.data_offset()") &&
      IsEqual(tensor_descriptor->cmps_size(), r_tensor_descriptor->cmps_size(), "TensorDescriptor.cmps_size()") &&
      IsEqual(tensor_descriptor->cmps_tab(), r_tensor_descriptor->cmps_tab(), "TensorDescriptor.cmps_tab()") &&
      IsEqual(tensor_descriptor->cmps_tab_offset(), r_tensor_descriptor->cmps_tab_offset(),
              "TensorDescriptor.cmps_tab_offset()"));
  } else {
    // Equal only if neither side has a proto message.
    return ((tensor_descriptor == nullptr) && (r_tensor_descriptor == nullptr));
  }
}
// Equality delegates entirely to the proto-field comparison above; the
// generic attribute map is NOT compared here.
bool GeTensorDesc::operator==(const GeTensorDesc &r_ge_tensor_desc) const {
  return GeTensorDescAttrsAreEqual(r_ge_tensor_desc);
}
  322. GeShape &GeTensorDesc::ShapeReference() const {
  323. if (tensor_descriptor_.GetProtoMsg() != nullptr) {
  324. GeShape refShape(tensor_descriptor_.GetProtoOwner(), tensor_descriptor_.GetProtoMsg()->mutable_shape());
  325. __shape_.RefTo(refShape);
  326. } else {
  327. GeShape refShape(tensor_descriptor_.GetProtoOwner(), nullptr);
  328. __shape_.RefTo(refShape);
  329. }
  330. return __shape_;
  331. }
  332. void GeTensorDesc::Init() {
  333. SetFormat(FORMAT_ND);
  334. SetOriginFormat(FORMAT_ND);
  335. TensorUtils::SetDeviceType(*this, DeviceType::NPU);
  336. if (tensor_descriptor_.GetProtoMsg() == nullptr) {
  337. GELOGE(GRAPH_FAILED, "ProtoType nullptr.");
  338. return;
  339. }
  340. tensor_descriptor_.GetProtoMsg()->set_has_out_attr(true);
  341. }
  342. ProtoAttrMapHelper GeTensorDesc::MutableAttrMap() {
  343. if (tensor_descriptor_.GetProtoMsg() != nullptr) {
  344. return ProtoAttrMapHelper(tensor_descriptor_.GetProtoOwner(), tensor_descriptor_.GetProtoMsg()->mutable_attr());
  345. }
  346. return ProtoAttrMapHelper(tensor_descriptor_.GetProtoOwner(), nullptr);
  347. }
  348. ConstProtoAttrMapHelper GeTensorDesc::GetAttrMap() const {
  349. if (tensor_descriptor_.GetProtoMsg() != nullptr) {
  350. return ConstProtoAttrMapHelper(tensor_descriptor_.GetProtoOwner(),
  351. tensor_descriptor_.GetProtoMsg()->mutable_attr());
  352. }
  353. return ConstProtoAttrMapHelper(tensor_descriptor_.GetProtoOwner(), nullptr);
  354. }
// Replace shape, format and data type in one call.
void GeTensorDesc::Update(GeShape shape, Format format, DataType dt) {
  ShapeReference() = std::move(shape);
  SetFormat(format);
  SetDataType(dt);
}
// Return a copy of the stored shape.
GeShape GeTensorDesc::GetShape() const { return ShapeReference(); }
// Return a reference bound to the shape proto; mutations write through.
GeShape &GeTensorDesc::MutableShape() { return ShapeReference(); }
// Overwrite the stored shape.
void GeTensorDesc::SetShape(GeShape shape) { ShapeReference() = std::move(shape); }
  363. // set shape with -2, it stand for unknown shape
  364. void GeTensorDesc::SetUnknownDimNumShape() { SetShape(GeShape({UNKNOWN_DIM_NUM})); }
  365. // for unknown shape
  366. graphStatus GeTensorDesc::SetShapeRange(const std::vector<std::pair<int64_t, int64_t>> &range) {
  367. std::vector<vector<int64_t>> shape_range;
  368. for (const auto &ele : range) {
  369. shape_range.emplace_back(std::vector<int64_t>({ele.first, ele.second}));
  370. }
  371. auto ret = AttrUtils::SetListListInt(this, TENSOR_UTILS_SHAPE_RANGE, shape_range);
  372. return ret ? GRAPH_SUCCESS : GRAPH_FAILED;
  373. }
  374. graphStatus GeTensorDesc::GetShapeRange(std::vector<std::pair<int64_t, int64_t>> &range) const {
  375. std::vector<vector<int64_t>> shape_range;
  376. (void)AttrUtils::GetListListInt(this, TENSOR_UTILS_SHAPE_RANGE, shape_range);
  377. for (const auto &ele : shape_range) {
  378. // here must be only two elemenet because pair
  379. if (ele.size() != 2) {
  380. GELOGE(GRAPH_FAILED, "shape_range must contain only 2 value but really is %lu", ele.size());
  381. return GRAPH_FAILED;
  382. }
  383. std::pair<int64_t, int64_t> pair({ele[0], ele[1]});
  384. range.emplace_back(pair);
  385. }
  386. return GRAPH_SUCCESS;
  387. }
  388. GeShape GeTensorDesc::GetOriginShape() const {
  389. vector<int64_t> origin_shape;
  390. if (!AttrUtils::GetListInt(this, TENSOR_UTILS_ORIGIN_SHAPE, origin_shape)) {
  391. return GeShape();
  392. }
  393. return GeShape(origin_shape);
  394. }
  395. void GeTensorDesc::SetOriginShape(const GeShape &origin_shape) {
  396. std::vector<int64_t> origin_shape_tmp = origin_shape.GetDims();
  397. (void)AttrUtils::SetListInt(this, TENSOR_UTILS_ORIGIN_SHAPE, origin_shape_tmp);
  398. }
  399. Format GeTensorDesc::GetFormat() const {
  400. auto tensor_descriptor_msg = tensor_descriptor_.GetProtoMsg();
  401. if (tensor_descriptor_msg != nullptr) {
  402. return TypeUtils::SerialStringToFormat(tensor_descriptor_msg->layout());
  403. }
  404. return FORMAT_RESERVED;
  405. }
  406. void GeTensorDesc::SetFormat(Format format) {
  407. auto tensor_descriptor_msg = tensor_descriptor_.GetProtoMsg();
  408. if (tensor_descriptor_msg != nullptr) {
  409. tensor_descriptor_msg->set_layout(TypeUtils::FormatToSerialString(format));
  410. }
  411. }
  412. void GeTensorDesc::SetName(const std::string &name) {
  413. auto tensor_descriptor_msg = tensor_descriptor_.GetProtoMsg();
  414. if (tensor_descriptor_msg != nullptr) {
  415. tensor_descriptor_msg->set_name(name);
  416. return;
  417. }
  418. GELOGW("[SetName]tensor_descriptor_msg is null.");
  419. }
  420. const std::string GeTensorDesc::GetName() const {
  421. auto tensor_descriptor_msg = tensor_descriptor_.GetProtoMsg();
  422. if (tensor_descriptor_msg != nullptr) {
  423. return tensor_descriptor_msg->name();
  424. }
  425. GELOGW("[GetName]tensor_descriptor_msg is null.");
  426. return "";
  427. }
  428. Format GeTensorDesc::GetOriginFormat() const {
  429. std::string origin_format_str;
  430. if (!AttrUtils::GetStr(this, TENSOR_UTILS_ORIGIN_FORMAT, origin_format_str)) {
  431. // Can not get the certificate and it's not set, return directly
  432. return FORMAT_RESERVED;
  433. }
  434. if (origin_format_str == "RESERVED") {
  435. return FORMAT_RESERVED;
  436. }
  437. return TypeUtils::SerialStringToFormat(origin_format_str);
  438. }
  439. void GeTensorDesc::SetOriginFormat(Format origin_format) {
  440. std::string origin_format_str = "RESERVED";
  441. if (origin_format != FORMAT_RESERVED) {
  442. origin_format_str = TypeUtils::FormatToSerialString(origin_format);
  443. }
  444. (void)AttrUtils::SetStr(this, TENSOR_UTILS_ORIGIN_FORMAT, origin_format_str);
  445. }
  446. DataType GeTensorDesc::GetDataType() const {
  447. auto tensor_descriptor_msg = tensor_descriptor_.GetProtoMsg();
  448. if (tensor_descriptor_msg == nullptr) {
  449. return DT_UNDEFINED;
  450. }
  451. auto &attr_map = *(tensor_descriptor_msg->mutable_attr());
  452. // Data type
  453. auto it_data_type = attr_map.find(kKeyDataTypeSelfDefined);
  454. if (it_data_type != attr_map.end()) {
  455. int64_t data_type_proto = it_data_type->second.i();
  456. for (auto it : kDataTypeSelfDefinedMap) {
  457. if (it.second == data_type_proto) {
  458. return it.first;
  459. }
  460. }
  461. } else {
  462. auto data_type_proto = tensor_descriptor_msg->dtype();
  463. for (auto it : kDataTypeMap) {
  464. if (it.second == data_type_proto) {
  465. return it.first;
  466. }
  467. }
  468. }
  469. return DT_UNDEFINED;
  470. }
  471. void GeTensorDesc::SetDataType(DataType dataType) {
  472. auto tensor_descriptor_msg = tensor_descriptor_.GetProtoMsg();
  473. if (tensor_descriptor_msg == nullptr) {
  474. return;
  475. }
  476. auto &attr_maps = *(tensor_descriptor_msg->mutable_attr());
  477. (void)attr_maps.erase(kKeyDataTypeSelfDefined);
  478. // Data type
  479. auto it = kDataTypeMap.find(dataType);
  480. if (it != kDataTypeMap.end()) {
  481. tensor_descriptor_msg->set_dtype(it->second);
  482. return;
  483. }
  484. auto it2 = kDataTypeSelfDefinedMap.find(dataType);
  485. if (it2 != kDataTypeSelfDefinedMap.end()) {
  486. attr_maps[kKeyDataTypeSelfDefined].set_i(it2->second);
  487. }
  488. }
  489. void GeTensorDesc::SetOriginDataType(DataType origin_data_type) {
  490. std::string origin_data_type_str = "RESERVED";
  491. if (origin_data_type != DT_UNDEFINED) {
  492. origin_data_type_str = TypeUtils::DataTypeToSerialString(origin_data_type);
  493. }
  494. (void)AttrUtils::SetStr(this, TENSOR_UTILS_ORIGIN_DATA_TYPE, origin_data_type_str);
  495. }
  496. DataType GeTensorDesc::GetOriginDataType() const {
  497. std::string origin_data_type_str;
  498. if (!AttrUtils::GetStr(this, TENSOR_UTILS_ORIGIN_DATA_TYPE, origin_data_type_str)) {
  499. return DT_UNDEFINED;
  500. }
  501. if (origin_data_type_str == "RESERVED") {
  502. return DT_UNDEFINED;
  503. }
  504. return TypeUtils::SerialStringToDataType(origin_data_type_str);
  505. }
// Read the ref-port index list attribute; presumably the output-port indices
// this tensor references — verify against callers. Absent attribute yields
// an empty vector.
std::vector<uint32_t> GeTensorDesc::GetRefPortIndex() const {
  vector<uint32_t> ref_port_index;
  (void)AttrUtils::GetListInt(this, TENSOR_UTILS_REF_PORT_INDEX, ref_port_index);
  return ref_port_index;
}
// Persist the ref-port index list into the attribute map.
void GeTensorDesc::SetRefPortByIndex(const std::vector<uint32_t> &index) {
  (void)AttrUtils::SetListInt(this, TENSOR_UTILS_REF_PORT_INDEX, index);
}
  514. graphStatus GeTensorDesc::IsValid() const {
  515. auto dtype = this->GetDataType();
  516. auto format = this->GetFormat();
  517. if (dtype == DT_UNDEFINED && format == FORMAT_RESERVED) {
  518. return GRAPH_PARAM_INVALID;
  519. }
  520. return GRAPH_SUCCESS;
  521. }
// Clone via the copy constructor (which copies the proto value).
GeTensorDesc GeTensorDesc::Clone() const { return *this; }
// Copy assignment: copies the other descriptor's proto value.
GeTensorDesc &GeTensorDesc::operator=(const GeTensorDesc &desc) {
  if (&desc != this) {
    tensor_descriptor_.CopyValueFrom(desc.tensor_descriptor_);
  }
  return *this;
}
// Move assignment.
// NOTE(review): calls CopyValueFrom on an rvalue while the move constructor
// uses MoveValueFrom — confirm whether MoveValueFrom was intended here.
GeTensorDesc &GeTensorDesc::operator=(GeTensorDesc &&desc) {
  if (&desc != this) {
    tensor_descriptor_.CopyValueFrom(std::move(desc.tensor_descriptor_));
  }
  return *this;
}
  535. GeTensor::GeTensor::GeTensor() {
  536. tensor_def_.InitDefault();
  537. // Default init desc
  538. DescReference() = GeTensorDesc();
  539. }
  540. GeTensor::GeTensor(const GeTensorDesc &tensor_desc) : GeTensor() { DescReference() = tensor_desc; }
  541. GeTensor::GeTensor(const GeTensorDesc &tensor_desc, const vector<uint8_t> &data) : GeTensor() {
  542. DescReference() = tensor_desc;
  543. auto proto_msg = tensor_def_.GetProtoMsg();
  544. if (proto_msg != nullptr) {
  545. proto_msg->set_data(data.data(), data.size());
  546. }
  547. }
  548. GeTensor::GeTensor(const GeTensorDesc &tensor_desc, const uint8_t *data, size_t size) : GeTensor() {
  549. DescReference() = tensor_desc;
  550. auto proto_msg = tensor_def_.GetProtoMsg();
  551. if (proto_msg != nullptr && data != nullptr) {
  552. proto_msg->set_data(data, size);
  553. }
  554. }
  555. GeTensor::GeTensor(GeTensorDesc &&tensor_desc, vector<uint8_t> &&data) : GeTensor() {
  556. DescReference() = std::move(tensor_desc);
  557. auto proto_msg = tensor_def_.GetProtoMsg();
  558. if (proto_msg != nullptr) {
  559. proto_msg->set_data(data.data(), data.size());
  560. }
  561. }
  562. GeTensor::GeTensor(const GeTensorDesc &tensor_desc, const Buffer &data) : GeTensor() {
  563. DescReference() = tensor_desc;
  564. auto proto_msg = tensor_def_.GetProtoMsg();
  565. if (proto_msg != nullptr) {
  566. if (data.size() == 0) {
  567. GELOGI("GetSize res is 0.");
  568. }
  569. if (data.data() == nullptr) {
  570. GELOGI("data addr is null.");
  571. }
  572. proto_msg->set_data(data.GetData(), data.GetSize());
  573. }
  574. }
  575. GeTensor::GeTensor(const ProtoMsgOwner &proto_owner, proto::TensorDef *proto_msg)
  576. : tensor_def_(proto_owner, proto_msg) {}
  577. GeTensorDesc GeTensor::GetTensorDesc() const { return DescReference(); }
  578. GeTensorDesc &GeTensor::MutableTensorDesc() { return DescReference(); }
  579. GeTensorDesc &GeTensor::DescReference() const {
  580. if (tensor_def_.GetProtoMsg() != nullptr) {
  581. GeTensorDesc tensor_desc(tensor_def_.GetProtoOwner(), tensor_def_.GetProtoMsg()->mutable_desc());
  582. __desc_.RefTo(tensor_desc);
  583. } else {
  584. GeTensorDesc tensor_desc(tensor_def_.GetProtoOwner(), nullptr);
  585. __desc_.RefTo(tensor_desc);
  586. }
  587. return __desc_;
  588. }
  589. void GeTensor::SetTensorDesc(const GeTensorDesc &tensor_desc) { DescReference() = tensor_desc; }
  590. const Buffer GeTensor::GetData() const {
  591. auto proto_msg = tensor_def_.GetProtoMsg();
  592. if (proto_msg != nullptr) {
  593. return Buffer(tensor_def_.GetProtoOwner(), proto_msg->mutable_data());
  594. }
  595. return Buffer();
  596. }
  597. Buffer GeTensor::MutableData() {
  598. auto proto_msg = tensor_def_.GetProtoMsg();
  599. if (proto_msg != nullptr) {
  600. return Buffer(tensor_def_.GetProtoOwner(), proto_msg->mutable_data());
  601. }
  602. return Buffer();
  603. }
// Replace the tensor's bytes with a copy of `data`. NOTE(review): despite the
// rvalue parameter, set_data(ptr, size) copies — the move is not exploited;
// confirm whether a proto string-move overload is available.
// GE_CHECK_NOTNULL is assumed to log and return a failure status on nullptr —
// confirm the macro definition in ge_log.h.
graphStatus GeTensor::SetData(vector<uint8_t> &&data) {
  auto proto_msg = tensor_def_.GetProtoMsg();
  GE_CHECK_NOTNULL(proto_msg);
  proto_msg->set_data(data.data(), data.size());
  return GRAPH_SUCCESS;
}
// Replace the tensor's bytes with a copy of `data`.
graphStatus GeTensor::SetData(const vector<uint8_t> &data) {
  auto proto_msg = tensor_def_.GetProtoMsg();
  GE_CHECK_NOTNULL(proto_msg);
  proto_msg->set_data(data.data(), data.size());
  return GRAPH_SUCCESS;
}
// Replace the tensor's bytes with a copy of `size` bytes at `data`;
// both the pointer and the proto must be non-null.
graphStatus GeTensor::SetData(const uint8_t *data, size_t size) {
  GE_CHECK_NOTNULL(data);
  auto proto_msg = tensor_def_.GetProtoMsg();
  GE_CHECK_NOTNULL(proto_msg);
  proto_msg->set_data(data, size);
  return GRAPH_SUCCESS;
}
// Replace the tensor's bytes from a Buffer. An empty or null buffer is only
// logged (info level) and the copy is still attempted.
graphStatus GeTensor::SetData(const Buffer &data) {
  auto proto_msg = tensor_def_.GetProtoMsg();
  GE_CHECK_NOTNULL(proto_msg);
  if (data.size() == 0) {
    GELOGI("GetSize res is 0.");
  }
  if (data.data() == nullptr) {
    GELOGI("data addr is null.");
  }
  proto_msg->set_data(data.data(), data.size());
  return GRAPH_SUCCESS;
}
// Clone: explicitly copies the TensorDef value into a new tensor.
GeTensor GeTensor::Clone() const {
  GeTensor tensor;
  tensor.tensor_def_.CopyValueFrom(tensor_def_);
  return tensor;
}
// Copy constructor.
// NOTE(review): this assigns tensor_def_ directly instead of calling
// CopyValueFrom as Clone() does — depending on GeIrProtoHelper::operator=,
// the copy may share the underlying proto message with `other` rather than
// deep-copy it; confirm the intended semantics.
GeTensor::GeTensor(const GeTensor &other) { tensor_def_ = other.tensor_def_; }
// Copy assignment (same sharing caveat as the copy constructor above).
GeTensor &GeTensor::operator=(const GeTensor &other) {
  if (&other != this) {
    tensor_def_ = other.tensor_def_;
  }
  return *this;
}
// Read the proto `size` field into `size`; fails (via GE_CHECK_NOTNULL,
// assumed to log and return early on nullptr) when the proto is missing.
GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetSize(const GeTensorDesc &tensor_desc,
                                                                                int64_t &size) {
  auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  GE_CHECK_NOTNULL(tensor_descriptor_msg);
  size = static_cast<int64_t>(tensor_descriptor_msg->size());
  return GRAPH_SUCCESS;
}
// Write the proto `size` field; silently a no-op when the proto is missing.
GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetSize(GeTensorDesc &tensor_desc, int64_t size) {
  auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  if (tensor_descriptor_msg != nullptr) {
    tensor_descriptor_msg->set_size(size);
  }
}
// Read the proto `weight_size` field, narrowed to uint32_t; 0 when the proto
// is missing.
uint32_t TensorUtils::GetWeightSize(const GeTensorDesc &tensor_desc) {
  auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  if (tensor_descriptor_msg != nullptr) {
    return static_cast<uint32_t>(tensor_descriptor_msg->weight_size());
  }
  return 0;
}
// Convenience overload: read the weight size from a tensor's descriptor.
uint32_t TensorUtils::GetWeightSize(const GeTensor &tensor) { return GetWeightSize(tensor.GetTensorDesc()); }
// Convenience overload for a shared pointer; a null pointer yields 0.
GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY uint32_t TensorUtils::GetWeightSize(const ConstGeTensorPtr &tensor_ptr) {
  if (tensor_ptr == nullptr) {
    return 0;
  }
  return GetWeightSize(*tensor_ptr);
}
  674. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY uint8_t *TensorUtils::GetWeightAddr(const ConstGeTensorPtr &tensor_ptr,
  675. uint8_t *base) {
  676. if (tensor_ptr == nullptr) {
  677. GELOGE(GRAPH_FAILED, "tensor_ptr is null.");
  678. return nullptr;
  679. }
  680. return GetWeightAddr(*tensor_ptr, base);
  681. }
  682. uint8_t *TensorUtils::GetWeightAddr(const GeTensor &tensor, uint8_t *base) {
  683. if (base == nullptr) {
  684. GELOGE(GRAPH_FAILED, "base is null.");
  685. return nullptr;
  686. }
  687. int64_t weight_data_offset = 0;
  688. if (GetDataOffset(tensor.GetTensorDesc(), weight_data_offset) != GRAPH_SUCCESS) return nullptr;
  689. if (weight_data_offset == 0) {
  690. // The weight of offset 0 is still in const op, still get from ATTR_NAME_WEIGHTS.
  691. return const_cast<uint8_t *>(tensor.GetData().data());
  692. }
  693. return base + weight_data_offset;
  694. }
  695. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetWeightSize(GeTensorDesc &tensor_desc,
  696. uint32_t size) {
  697. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  698. if (tensor_descriptor_msg != nullptr) {
  699. tensor_descriptor_msg->set_weight_size(size);
  700. }
  701. }
  702. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetReuseInput(const GeTensorDesc &tensor_desc,
  703. bool &flag) {
  704. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  705. GE_CHECK_NOTNULL(tensor_descriptor_msg);
  706. flag = tensor_descriptor_msg->reuse_input();
  707. return GRAPH_SUCCESS;
  708. }
  709. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetReuseInput(GeTensorDesc &tensor_desc, bool flag) {
  710. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  711. if (tensor_descriptor_msg != nullptr) {
  712. tensor_descriptor_msg->set_reuse_input(flag);
  713. }
  714. }
  715. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetOutputTensor(const GeTensorDesc &tensor_desc,
  716. bool &flag) {
  717. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  718. GE_CHECK_NOTNULL(tensor_descriptor_msg);
  719. flag = tensor_descriptor_msg->output_tensor();
  720. return GRAPH_SUCCESS;
  721. }
  722. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetOutputTensor(GeTensorDesc &tensor_desc, bool flag) {
  723. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  724. if (tensor_descriptor_msg != nullptr) {
  725. tensor_descriptor_msg->set_output_tensor(flag);
  726. }
  727. }
// Lookup tables translating between the numeric DeviceType value and the
// device-type string stored in the tensor descriptor proto.
// NOTE(review): these are mutable file-scope globals; operator[] on them
// default-inserts an entry for any unknown key, mutating shared state, and
// access is not synchronized — verify single-threaded use by callers.
static map<uint32_t, string> device_to_str_map{
    {0, "NPU"},
    {1, "CPU"},
};
// Reverse mapping: device-type string -> numeric DeviceType value.
static map<string, uint32_t> str_to_device_map{
    {"NPU", 0},
    {"CPU", 1},
};
  736. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetDeviceType(const GeTensorDesc &tensor_desc,
  737. DeviceType &type) {
  738. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  739. GE_CHECK_NOTNULL(tensor_descriptor_msg);
  740. string type_str = tensor_descriptor_msg->device_type();
  741. type = DeviceType(str_to_device_map[type_str]);
  742. return GRAPH_SUCCESS;
  743. }
  744. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetDeviceType(GeTensorDesc &tensor_desc,
  745. DeviceType type) {
  746. auto type_str = device_to_str_map[type];
  747. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  748. if (tensor_descriptor_msg != nullptr) {
  749. tensor_descriptor_msg->set_device_type(type_str);
  750. }
  751. }
  752. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetInputTensor(const GeTensorDesc &tensor_desc,
  753. bool &flag) {
  754. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  755. GE_CHECK_NOTNULL(tensor_descriptor_msg);
  756. flag = tensor_descriptor_msg->input_tensor();
  757. return GRAPH_SUCCESS;
  758. }
  759. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetInputTensor(GeTensorDesc &tensor_desc, bool flag) {
  760. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  761. if (tensor_descriptor_msg != nullptr) {
  762. tensor_descriptor_msg->set_input_tensor(flag);
  763. }
  764. }
  765. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetRealDimCnt(const GeTensorDesc &tensor_desc,
  766. uint32_t &cnt) {
  767. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  768. GE_CHECK_NOTNULL(tensor_descriptor_msg);
  769. cnt = static_cast<uint32_t>(tensor_descriptor_msg->real_dim_cnt());
  770. return GRAPH_SUCCESS;
  771. }
  772. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetRealDimCnt(GeTensorDesc &tensor_desc,
  773. uint32_t cnt) {
  774. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  775. if (tensor_descriptor_msg != nullptr) {
  776. tensor_descriptor_msg->set_real_dim_cnt(cnt);
  777. }
  778. }
  779. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus
  780. TensorUtils::GetReuseInputIndex(const GeTensorDesc &tensor_desc, uint32_t &idx) {
  781. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  782. GE_CHECK_NOTNULL(tensor_descriptor_msg);
  783. idx = static_cast<uint32_t>(tensor_descriptor_msg->reuse_input_index());
  784. return GRAPH_SUCCESS;
  785. }
  786. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetReuseInputIndex(GeTensorDesc &tensor_desc,
  787. uint32_t idx) {
  788. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  789. if (tensor_descriptor_msg != nullptr) {
  790. tensor_descriptor_msg->set_reuse_input_index(idx);
  791. }
  792. }
  793. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetDataOffset(const GeTensorDesc &tensor_desc,
  794. int64_t &offset) {
  795. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  796. if (tensor_descriptor_msg != nullptr) {
  797. offset = tensor_descriptor_msg->data_offset();
  798. return GRAPH_SUCCESS;
  799. } else {
  800. GELOGW("tensor_descriptor_msg is nullptr.");
  801. return GRAPH_FAILED;
  802. }
  803. }
  804. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetDataOffset(GeTensorDesc &tensor_desc,
  805. int64_t offset) {
  806. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  807. if (tensor_descriptor_msg != nullptr) {
  808. tensor_descriptor_msg->set_data_offset(offset);
  809. }
  810. }
  811. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetCmpsSize(const GeTensorDesc &tensor_desc,
  812. uint32_t &cmp_size) {
  813. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  814. if (tensor_descriptor_msg != nullptr) {
  815. cmp_size = static_cast<uint32_t>(tensor_descriptor_msg->cmps_size());
  816. }
  817. return GRAPH_SUCCESS;
  818. }
  819. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetCmpsSize(GeTensorDesc &tensor_desc,
  820. uint32_t cmp_size) {
  821. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  822. if (tensor_descriptor_msg != nullptr) {
  823. tensor_descriptor_msg->set_cmps_size(cmp_size);
  824. }
  825. }
  826. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetCmpsTab(const GeTensorDesc &tensor_desc,
  827. vector<uint8_t> &vec) {
  828. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  829. if (tensor_descriptor_msg != nullptr) {
  830. string str = tensor_descriptor_msg->cmps_tab();
  831. vec.assign(str.begin(), str.end());
  832. }
  833. return GRAPH_SUCCESS;
  834. }
  835. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetCmpsTab(GeTensorDesc &tensor_desc,
  836. const uint8_t *data, size_t size) {
  837. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  838. if (tensor_descriptor_msg != nullptr) {
  839. GE_CHK_BOOL_EXEC(data != nullptr, return, "data is null.");
  840. string str((const char *)data, size);
  841. tensor_descriptor_msg->set_cmps_tab(str);
  842. }
  843. }
  844. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus
  845. TensorUtils::GetCmpsTabOffset(const GeTensorDesc &tensor_desc, int64_t &tab_offset) {
  846. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  847. if (tensor_descriptor_msg != nullptr) {
  848. tab_offset = tensor_descriptor_msg->cmps_tab_offset();
  849. }
  850. return GRAPH_SUCCESS;
  851. }
  852. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetCmpsTabOffset(GeTensorDesc &tensor_desc,
  853. int64_t tab_offset) {
  854. auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg();
  855. if (tensor_descriptor_msg != nullptr) {
  856. tensor_descriptor_msg->set_cmps_tab_offset(tab_offset);
  857. }
  858. }
  859. graphStatus TensorUtils::GetCmpsInfo(const GeTensorDesc &tensor_desc, CompressInfo &info) {
  860. GeAttrValue attr_value;
  861. if (tensor_desc.GetAttr(TENSOR_UTILS_CMPSINFO, attr_value) != GRAPH_SUCCESS) {
  862. return GRAPH_FAILED;
  863. }
  864. return attr_value.GetValue(info);
  865. }
  866. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetCmpsInfo(GeTensorDesc &tensor_desc,
  867. const CompressInfo &info) {
  868. (void)tensor_desc.SetAttr(TENSOR_UTILS_CMPSINFO, GeAttrValue::CreateFrom(info));
  869. }
  870. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool TensorUtils::HasAlloffsetQuantizeInfo(
  871. const GeTensorDesc &tensor_desc) {
  872. return tensor_desc.HasAttr(TENSOR_UTILS_ALLOFFSET_QUANTIZE_INFO);
  873. }
  874. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus
  875. TensorUtils::GetAlloffsetQuantizeInfo(const GeTensorDesc &tensor_desc, AllOffsetQuantizeInfo &info) {
  876. GeAttrValue attr_value;
  877. if (tensor_desc.GetAttr(TENSOR_UTILS_ALLOFFSET_QUANTIZE_INFO, attr_value) != GRAPH_SUCCESS) {
  878. GELOGW("get attr alloffset_quantize_info fail.");
  879. }
  880. return attr_value.GetValue(info);
  881. }
  882. void TensorUtils::SetAlloffsetQuantizeInfo(GeTensorDesc &tensor_desc, const AllOffsetQuantizeInfo &info) {
  883. (void)tensor_desc.SetAttr(TENSOR_UTILS_ALLOFFSET_QUANTIZE_INFO, GeAttrValue::CreateFrom(info));
  884. }
  885. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetRC(const GeTensorDesc &tensor_desc,
  886. uint32_t &rc) {
  887. return AttrUtils::GetInt(&tensor_desc, TENSOR_UTILS_RC, rc) ? GRAPH_SUCCESS : GRAPH_FAILED;
  888. }
  889. GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetRC(GeTensorDesc &tensor_desc, uint32_t rc) {
  890. (void)AttrUtils::SetInt(&tensor_desc, TENSOR_UTILS_RC, rc);
  891. }
  892. } // namespace ge

图引擎模块(GE)是MindSpore的一个子模块,其代码由C++实现,位于前端模块ME和底层硬件之间,起到承接作用。图引擎模块以ME下发的图作为输入,然后进行一系列的深度图优化操作,最后输出一张可以在底层硬件上高效运行的图。GE针对昇腾AI处理器的硬件结构特点,做了特定的优化工作,以此来充分发挥出昇腾AI处理器的强大算力。在进行模型训练/推理时,GE会被自动调用而用户并不感知。GE主要由GE API和GE Core两部分组成,详细的架构图如下所示