@@ -58,7 +58,7 @@ Status HybridModelAsyncExecutor::Start(const std::shared_ptr<ModelListener> &lis
   run_flag_ = true;
   listener_ = listener;
-  future_ = std::async([&]() -> Status {
+  future_ = std::async(std::launch::async, [&]() -> Status {
     GetContext().SetSessionId(executor_->GetContext()->session_id);
     return RunInternal();
   });
@@ -49,7 +49,7 @@ Status CallbackManager::RegisterCallback(rtCallback_t callback, void *user_data)
 Status CallbackManager::Init() {
   rtContext_t ctx = nullptr;
   GE_CHK_RT_RET(rtCtxGetCurrent(&ctx));
-  ret_future_ = std::async([&](rtContext_t context) ->Status {
+  ret_future_ = std::async(std::launch::async, [&](rtContext_t context) ->Status {
     return CallbackProcess(context);
   }, ctx);
   if (!ret_future_.valid()) {
@@ -307,7 +307,7 @@ Status SubgraphExecutor::LaunchTasks() {
 Status SubgraphExecutor::ScheduleTasks() {
   GELOGD("[%s] Start to schedule prepare workers.", graph_item_->GetName().c_str());
-  auto prepare_future = std::async([&]() -> Status {
+  auto prepare_future = std::async(std::launch::async, [&]() -> Status {
     GetContext().SetSessionId(context_->session_id);
     auto ret = PrepareNodes();
     ready_queue_.Push(nullptr);
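All three hunks make the same change: the launch policy of `std::async` is made explicit. Without it, the default policy (`std::launch::async | std::launch::deferred`) lets the implementation defer the task, so a long-lived worker loop such as `RunInternal()` or `CallbackProcess()` might never start until someone waits on the future. Below is a minimal standalone sketch (not part of the patch) illustrating the difference; the thread-id printing and the 100 ms sleep are purely for demonstration.

```cpp
#include <chrono>
#include <future>
#include <iostream>
#include <thread>

int main() {
  // Default policy: async | deferred. The implementation may choose
  // deferred execution, in which case the lambda only runs when get()
  // or wait() is called, on the calling thread.
  auto maybe_deferred = std::async([] {
    std::cout << "default-policy task on thread "
              << std::this_thread::get_id() << std::endl;
    return 0;
  });

  // Explicit std::launch::async: the lambda is guaranteed to start on a
  // separate thread, which is what a background worker loop requires.
  auto always_async = std::async(std::launch::async, [] {
    std::cout << "std::launch::async task on thread "
              << std::this_thread::get_id() << std::endl;
    return 0;
  });

  // Give the asynchronous task a moment to run before we join both.
  std::this_thread::sleep_for(std::chrono::milliseconds(100));
  return maybe_deferred.get() + always_async.get();
}
```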