| @@ -60,7 +60,7 @@ Status InsertReshapeIfNeed(const NodePtr &node) { | |||||
| node->GetName().c_str(), src_anchor->GetIdx(), dst_node->GetName().c_str(), dst_anchor->GetIdx()); | node->GetName().c_str(), src_anchor->GetIdx(), dst_node->GetName().c_str(), dst_anchor->GetIdx()); | ||||
| GE_CHECK_NOTNULL(dst_node); | GE_CHECK_NOTNULL(dst_node); | ||||
| GE_CHECK_NOTNULL(dst_node->GetOpDesc()); | GE_CHECK_NOTNULL(dst_node->GetOpDesc()); | ||||
| auto dst_tensor = dst_node->GetOpDesc()->GetInputDescPtr(dst_anchor->GetIdx()); | |||||
| auto dst_tensor = dst_node->GetOpDesc()->MutableInputDesc(dst_anchor->GetIdx()); | |||||
| GE_CHECK_NOTNULL(dst_tensor); | GE_CHECK_NOTNULL(dst_tensor); | ||||
| bool is_dynamic = false; | bool is_dynamic = false; | ||||
| const auto &src_tensor_dims = src_tensor->GetShape().GetDims(); | const auto &src_tensor_dims = src_tensor->GetShape().GetDims(); | ||||
| @@ -71,6 +71,12 @@ Status InsertReshapeIfNeed(const NodePtr &node) { | |||||
| dst_node->GetName().c_str()); | dst_node->GetName().c_str()); | ||||
| is_dynamic = true; | is_dynamic = true; | ||||
| } | } | ||||
| if (dst_node->GetType() == NETOUTPUT && is_dynamic) { | |||||
| // NetOutput shape must be continuous when dynamic shape. | |||||
| // Otherwise, an error may occur while waiting for the shape refresh to time out during execution. | |||||
| dst_tensor->SetShape(src_tensor->GetShape()); | |||||
| continue; | |||||
| } | |||||
| bool is_need_insert_reshape = src_tensor_dims != dst_tensor_dims && | bool is_need_insert_reshape = src_tensor_dims != dst_tensor_dims && | ||||
| !is_dynamic; | !is_dynamic; | ||||
| if (is_need_insert_reshape) { | if (is_need_insert_reshape) { | ||||
| @@ -216,7 +216,7 @@ Status AiCoreNodeTask::ExecuteAsync(TaskContext &context, std::function<void()> | |||||
| RECORD_EXECUTION_EVENT(context.GetExecutionContext(), context.GetNodeName(), "[AiCoreNodeLaunchKernel] End"); | RECORD_EXECUTION_EVENT(context.GetExecutionContext(), context.GetNodeName(), "[AiCoreNodeLaunchKernel] End"); | ||||
| } | } | ||||
| if (done_callback != nullptr) { | |||||
| if (done_callback != nullptr && !is_single_op_) { | |||||
| RECORD_EXECUTION_EVENT(context.GetExecutionContext(), context.GetNodeName(), "[AiCoreNodeRegisterCallback] Start"); | RECORD_EXECUTION_EVENT(context.GetExecutionContext(), context.GetNodeName(), "[AiCoreNodeRegisterCallback] Start"); | ||||
| GE_CHK_STATUS_RET_NOLOG(context.RegisterCallback(done_callback)); | GE_CHK_STATUS_RET_NOLOG(context.RegisterCallback(done_callback)); | ||||
| RECORD_EXECUTION_EVENT(context.GetExecutionContext(), context.GetNodeName(), "[AiCoreNodeRegisterCallback] End"); | RECORD_EXECUTION_EVENT(context.GetExecutionContext(), context.GetNodeName(), "[AiCoreNodeRegisterCallback] End"); | ||||
| @@ -61,10 +61,12 @@ class AiCoreNodeTask : public NodeTask { | |||||
| const vector<int64_t> &GetWorkspaceSizes() const; | const vector<int64_t> &GetWorkspaceSizes() const; | ||||
| void SetWorkspaceSizes(const vector<int64_t> &workspace_sizes); | void SetWorkspaceSizes(const vector<int64_t> &workspace_sizes); | ||||
| void SetSingleOp(bool is_single_op) {is_single_op_ = is_single_op;} | |||||
| private: | private: | ||||
| Status CheckOverflow(TaskContext &context); | Status CheckOverflow(TaskContext &context); | ||||
| std::vector<std::unique_ptr<AiCoreOpTask>> tasks_; | std::vector<std::unique_ptr<AiCoreOpTask>> tasks_; | ||||
| std::vector<int64_t> workspace_sizes_; | std::vector<int64_t> workspace_sizes_; | ||||
| bool is_single_op_ = false; | |||||
| }; | }; | ||||
| class AiCoreNodeExecutor : public NodeExecutor { | class AiCoreNodeExecutor : public NodeExecutor { | ||||
| @@ -88,6 +88,7 @@ Status AiCoreTaskBuilder::BuildTask(std::unique_ptr<AiCoreNodeTask> &node_task, | |||||
| node_task.reset(new(std::nothrow)AiCoreNodeTask(std::move(op_tasks))); | node_task.reset(new(std::nothrow)AiCoreNodeTask(std::move(op_tasks))); | ||||
| GE_CHECK_NOTNULL(node_task); | GE_CHECK_NOTNULL(node_task); | ||||
| node_task->SetSingleOp(is_single_op); | |||||
| return SUCCESS; | return SUCCESS; | ||||
| } | } | ||||
| @@ -42,8 +42,8 @@ ut::GraphBuilder Graph1Builder() { | |||||
| auto var1 = builder.AddNode("var1", "Variable", 0, 1, FORMAT_ND, DT_FLOAT, {-1}); | auto var1 = builder.AddNode("var1", "Variable", 0, 1, FORMAT_ND, DT_FLOAT, {-1}); | ||||
| auto const1 = builder.AddNode("const1", "Const", 0, 1, FORMAT_ND, DT_FLOAT, {1, 1, 224, 224}); | auto const1 = builder.AddNode("const1", "Const", 0, 1, FORMAT_ND, DT_FLOAT, {1, 1, 224, 224}); | ||||
| auto transdata2 = builder.AddNode("transdata2", "Transdata", 1, 1, FORMAT_ND, DT_FLOAT, {224, 224}); | auto transdata2 = builder.AddNode("transdata2", "Transdata", 1, 1, FORMAT_ND, DT_FLOAT, {224, 224}); | ||||
| auto transdata1 = builder.AddNode("transdata1", "Transdata", 1, 1, FORMAT_ND, DT_FLOAT, {224, 224}); | |||||
| auto netoutput1 = builder.AddNode("netoutput1", "Netoutput", 2, 0); | |||||
| auto transdata1 = builder.AddNode("transdata1", "Transdata", 1, 1, FORMAT_ND, DT_FLOAT, {-1, 224}); | |||||
| auto netoutput1 = builder.AddNode("netoutput1", "NetOutput", 2, 0); | |||||
| builder.AddDataEdge(var1, 0, transdata1, 0); | builder.AddDataEdge(var1, 0, transdata1, 0); | ||||
| builder.AddDataEdge(const1, 0, transdata2, 0); | builder.AddDataEdge(const1, 0, transdata2, 0); | ||||
| @@ -61,7 +61,7 @@ TEST_F(UtestReshapeRecoveryPass, reshape_recovery_with_dynamic_shape) { | |||||
| EXPECT_EQ(graph->GetDirectNodesSize(),5); | EXPECT_EQ(graph->GetDirectNodesSize(),5); | ||||
| Status ret = reshape_recovery_pass.Run(graph); | Status ret = reshape_recovery_pass.Run(graph); | ||||
| EXPECT_EQ(ret, SUCCESS); | EXPECT_EQ(ret, SUCCESS); | ||||
| EXPECT_EQ(graph->GetDirectNodesSize(),8); | |||||
| EXPECT_EQ(graph->GetDirectNodesSize(),7); | |||||
| auto reshape1 = graph->FindNode("Reshape_ReshapeRecoveryPass_0"); | auto reshape1 = graph->FindNode("Reshape_ReshapeRecoveryPass_0"); | ||||
| EXPECT_NE(reshape1, nullptr); | EXPECT_NE(reshape1, nullptr); | ||||