From: @wangxiaotian22
Reviewed-by: @xchu42
Signed-off-by:
tags/v1.3.0
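The hunks below standardize GE log messages with an [Action][Object] prefix and, for user-facing AIPP configuration errors, pair GELOGE with REPORT_INPUT_ERROR. A minimal before/after sketch of the logging convention, using only macros that already appear in this diff (FooPass and its message are hypothetical placeholders, not part of the change):

  GELOGI("FooPass is enabled.");   // before: free-text message
  GELOGI("[Run][FooPass] start");  // after: [Action][Object] prefix names the operation and its target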
@@ -466,7 +466,7 @@ Status AllReduceParallelPass::Run(ComputeGraphPtr graph, const vector<SubgraphPt
     return NOT_CHANGED;
   }
-  GELOGI("AllReduceParallelPass is enabled.");
+  GELOGI("[Run][AllReduceParallelPass] start");
   GE_DUMP(graph, "BeforeAllReduceParallel");
   // All successors of HcomAllReduce.
@@ -576,7 +576,7 @@ Status LogicalStreamAllocator::Assign(const ComputeGraphPtr &root_graph, const G
   RefreshContinuousStreams(root_graph);
   stream_num = context_.next_stream;
-  GELOGI("Assigned logical stream num: %ld.", stream_num);
+  GELOGI("[Assign][LogicalStream] At last, stream num: %ld.", stream_num);
   return SUCCESS;
 }
@@ -608,7 +608,7 @@ Status LogicalStreamAllocator::DoAssign(const ComputeGraphPtr &graph, const Grap
     return status;
   }
-  GELOGD("Subgraphs of graph %s", graph->GetName().c_str());
+  GELOGD("[Show][Subgraphs] in graph %s", graph->GetName().c_str());
   for (const auto &subgraph : subgraphs) {
     if (subgraph != nullptr) {
       GELOGD("subgraph: %s", subgraph->name.c_str());
@@ -675,9 +675,9 @@ Status LogicalStreamAllocator::RunPasses(const ComputeGraphPtr &graph, const vec
     Status status = pass->Run(graph, subgraphs, context_);
     if (status == SUCCESS) {
-      GELOGD("Stream pass %s return SUCCESS.", pass->GetName().c_str());
+      GELOGD("[Show][Status]Stream pass %s return SUCCESS.", pass->GetName().c_str());
     } else if (status == NOT_CHANGED) {
-      GELOGD("Stream pass %s return NOT_CHANGED.", pass->GetName().c_str());
+      GELOGD("[Show][Status]Stream pass %s return NOT_CHANGED.", pass->GetName().c_str());
     } else {
       GELOGE(status, "Stream pass %s failed.", pass->GetName().c_str());
       return status;
@@ -508,7 +508,7 @@ BlockMemAssigner::BlockMemAssigner(ComputeGraphPtr compute_graph, const map<stri
       symbol_to_anchors_(symbol_to_anchors), anchor_to_symbol_(anchor_to_symbol), life_time_(0) {}
 BlockMemAssigner::~BlockMemAssigner() {
-  GELOGD("blocks_store_ size : %lu", blocks_store_.size());
+  GELOGD("[Destruct][BlockMemAssigner]blocks_store_ size : %lu", blocks_store_.size());
   for (MemoryBlock *memory_block : blocks_store_) {
     GE_DELETE_NEW_SINGLE(memory_block);
   }
@@ -2156,7 +2156,7 @@ void BlockMemAssigner::SetOpMemOffset(bool is_zero_copy) {
 Status BlockMemAssigner::Assign() {
   vector<int64_t> ranges;
   if (GetMemoryRanges(ranges) != SUCCESS) {
-    GELOGE(FAILED, "GetMemoryRanges Fail!");
+    GELOGE(FAILED, "[Get][MemoryRanges] Fail!");
     return FAILED;
   }
   GE_IF_BOOL_EXEC(ranges.empty(), return SUCCESS);
@@ -337,7 +337,7 @@ uint32_t GetContinuousMemoryType(const OpDescPtr &op_desc) {
   }
   if (continuous_type != 0) {
-    GELOGI("Current node %s continuous type %d", op_desc->GetName().c_str(), continuous_type);
+    GELOGI("[Get][MemType:Continuous]Current node %s, value is %d", op_desc->GetName().c_str(), continuous_type);
   }
   return continuous_type;
 }
@@ -482,7 +482,7 @@ Status GraphMemoryAssigner::ReAssignContinuousMemory(bool is_loop_graph) {
                       "[Assign][Memory:Continuous:Input]fail for node:%s.", node->GetName().c_str())
   }
   for (auto pair : memory_offset_) {
-    GELOGD("After reassign continuous memory, memory type = %ld, mem offset = %zu.", pair.first,
+    GELOGD("[Reassign][Memory:Continuous]At last, memory type = %ld, mem offset = %zu.", pair.first,
            pair.second.mem_offset_);
   }
   return ge::SUCCESS;
@@ -490,13 +490,13 @@ Status GraphMemoryAssigner::ReAssignContinuousMemory(bool is_loop_graph) {
 Status GraphMemoryAssigner::AssignContinuousInputMemory(const ge::NodePtr &node, int64_t &continuous_mem_start,
     int64_t &continuous_mem_size, int64_t memory_type, uint32_t continuous_type, bool reverse_refresh) {
-  GELOGI("Current node %s needs continuous input", node->GetName().c_str());
+  GELOGI("[Assign][Memory:Input:Continuous]start for Current node %s", node->GetName().c_str());
   auto iter = memory_offset_.find(memory_type);
   if (iter == memory_offset_.end()) {
     REPORT_INNER_ERROR("E19999", "find memory offset fail for mem_type:%ld, "
                        "when assign continuous input memory for node:%s, ", memory_type, node->GetName().c_str());
     GELOGE(FAILED, "[Find][MemOffset]fail for mem_type:%ld, when AssignContinuousInputMemory for node:%s",
-           memory_type, node->GetName().c_str());
+           memory_type, node->GetName().c_str());
     return FAILED;
   }
   // The head and tail of hcom continuous input should be added 512
@@ -566,9 +566,9 @@ Status GraphMemoryAssigner::AssignContinuousInputMemory(const ge::NodePtr &node,
       auto peer_output_offset = output_list.at(peer_out_data_anchor->GetIdx());
       output_list.at(peer_out_data_anchor->GetIdx()) = output_list_this.at(out2ins.begin()->first);
       peer_op_desc->SetOutputOffset(output_list);
-      GELOGI("Node %s out %d ref in %d input node %s, use output offset %ld update %ld", node->GetName().c_str(),
-             out2ins.begin()->first, out2ins.begin()->second, peer_op_desc->GetName().c_str(),
-             output_list_this.at(out2ins.begin()->first), peer_output_offset);
+      GELOGI("[Update][Offset]Node %s out %d ref in %d input node %s, use output offset %ld update %ld",
+             node->GetName().c_str(), out2ins.begin()->first, out2ins.begin()->second,
+             peer_op_desc->GetName().c_str(), output_list_this.at(out2ins.begin()->first), peer_output_offset);
     } else {
       GELOGD("Node %s out %d ref in %d input node %s with total ref numbers %zu.", node->GetName().c_str(),
              out2ins.begin()->first, out2ins.begin()->second, peer_op_desc->GetName().c_str(), out2ins.size());
@@ -929,8 +929,8 @@ Status GraphMemoryAssigner::AssignReferenceMemory() {
     if (out_op_desc->GetOutputsSize() > output_list.size()) {
       REPORT_INNER_ERROR("E19999", "Output size:%zu more than output offset size:%zu, judge invalid in node:%s "
-                         "when AssignReferenceMemory",
-                         out_op_desc->GetOutputsSize(), output_list.size(), node->GetName().c_str());
+                         "when AssignReferenceMemory",
+                         out_op_desc->GetOutputsSize(), output_list.size(), node->GetName().c_str());
       GELOGE(ge::FAILED, "[Check][InnerData]Output size:%zu more than output offset size:%zu, invalid in node:%s",
              out_op_desc->GetOutputsSize(), output_list.size(), node->GetName().c_str());
       return ge::FAILED;
@@ -1671,7 +1671,7 @@ bool GraphMemoryAssigner::AssignContinuousInputMemoryWithAtomicProcessDirectly(
     auto continuous_type = iter->second;
     bool continuous_input = ((continuous_type & kTypeInput) != 0) || ((continuous_type & kTypeInputNoPadding) != 0);
     if (continuous_input) {
-      GELOGI("Node %s 's precursor node %s need assign continuous input memory, store node firstly",
+      GELOGI("[Store][Node] of %s cause it's precursor node %s need assign continuous input memory",
             input_continuous_node->GetName().c_str(), in_node->GetName().c_str());
      return false;
    }
@@ -1681,7 +1681,7 @@ bool GraphMemoryAssigner::AssignContinuousInputMemoryWithAtomicProcessDirectly(
     node_2_continuous_type.emplace(out_node, continuous_type);
     bool continuous_input = ((continuous_type & kTypeInput) != 0) || ((continuous_type & kTypeInputNoPadding) != 0);
     if (continuous_input) {
-      GELOGI("Node %s 's succeed node %s need assign continuous input memory, store node firstly",
+      GELOGI("[Store][Node] of %s cause it's succeed node %s need assign continuous input memory",
             input_continuous_node->GetName().c_str(), out_node->GetName().c_str());
      return false;
    }
@@ -428,7 +428,8 @@ Status AippOp::ConvertRelatedInputNameToRank() {
   if (!convert_flag) {
     string error_msg = "Top name " + related_input_name + "convert rank failed, Please"
                        " ensure top name in aipp config is the top name of data node.";
-    GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
+    GELOGE(PARAM_INVALID, "[Check][InputParam]%s", error_msg.c_str());
+    REPORT_INPUT_ERROR("E19021", std::vector<std::string>({"reason"}), std::vector<std::string>({error_msg}));
     return PARAM_INVALID;
   }
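This hunk and the InsertNewOpUtil hunks below swap the combined GE_ERRORLOG_AND_ERRORMSG macro for an explicit GELOGE plus REPORT_INPUT_ERROR pair. A minimal sketch of that pattern, with argument shapes copied from this diff and a placeholder message:

  string error_msg = "reason shown to the user";                        // placeholder text
  GELOGE(PARAM_INVALID, "[Check][InputParam]%s", error_msg.c_str());    // developer-facing log
  REPORT_INPUT_ERROR("E19021", std::vector<std::string>({"reason"}),    // user-facing error record
                     std::vector<std::string>({error_msg}));
  return PARAM_INVALID;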
@@ -1,4 +1,4 @@
-/**
+/**
  * Copyright 2020 Huawei Technologies Co., Ltd
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -124,13 +124,15 @@ Status InsertNewOpUtil::CheckInputNamePositionNotRepeat() {
     if (another_item->related_input_name().empty()) {
       string error_msg = "Can not both set related_input_name and related_input_rank!"
                          " Please ensure param is the same with the first aipp config(related_input_name).";
-      GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
+      GELOGE(PARAM_INVALID, "[Check][InputParam]%s", error_msg.c_str());
+      REPORT_INPUT_ERROR("E19021", std::vector<std::string>({"reason"}), std::vector<std::string>({error_msg}));
       return PARAM_INVALID;
     }
     if (item->related_input_name() == another_item->related_input_name()) {
       string error_msg = "Can not insert aipp to the same postion! Please ensure related_input_name"
                          " param is different in different aipp config.";
-      GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
+      GELOGE(PARAM_INVALID, "[Check][InputParam]%s", error_msg.c_str());
+      REPORT_INPUT_ERROR("E19021", std::vector<std::string>({"reason"}), std::vector<std::string>({error_msg}));
       return PARAM_INVALID;
     }
   }
@@ -150,13 +152,15 @@ Status InsertNewOpUtil::CheckInputRankPositionNoRepeat() {
     if (!another_item->related_input_name().empty()) {
       string error_msg = "Can not both set related_input_rank and related_input_name!"
                          " Please ensure param is the same with the first aipp config(related_input_rank).";
-      GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
+      GELOGE(PARAM_INVALID, "[Check][InputParam]%s", error_msg.c_str());
+      REPORT_INPUT_ERROR("E19021", std::vector<std::string>({"reason"}), std::vector<std::string>({error_msg}));
       return PARAM_INVALID;
     }
     if (item->related_input_rank() == another_item->related_input_rank()) {
       string error_msg = "Can not insert aipp to the same postion! Please ensure related_input_rank"
                          " param is different in different aipp config.";
-      GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
+      GELOGE(PARAM_INVALID, "[Check][InputParam]%s", error_msg.c_str());
+      REPORT_INPUT_ERROR("E19021", std::vector<std::string>({"reason"}), std::vector<std::string>({error_msg}));
       return PARAM_INVALID;
     }
   }
@@ -212,7 +216,7 @@ Status InsertNewOpUtil::CheckGraph(const ComputeGraphPtr &graph) {
       }
     }
   }
-  GE_CHK_LOG_AND_ERRORMSG((aippNodes.size() == 0) || (aippNodes.size() == next_nodes_cnt),
+  GE_CHK_LOG_AND_ERRORMSG((aippNodes.size() == 0) || (aippNodes.size() == next_nodes_cnt),
                           PARAM_INVALID,
                           "Can not config part of outputs of Data node to support AIPP, config all "
                           "of the outputs of Data to support AIPP, or config none of them");