From 808632d3b4effd3c0807325b529d0354894c31b1 Mon Sep 17 00:00:00 2001
From: "Yang, Bo"
Date: Thu, 2 May 2024 18:35:18 -0700
Subject: [PATCH] [BugFix] Prevent the task of `_force_log` from being garbage
 collected (#4567)

---
 vllm/entrypoints/openai/api_server.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/vllm/entrypoints/openai/api_server.py b/vllm/entrypoints/openai/api_server.py
index 8b3c5ea9..f9e294af 100644
--- a/vllm/entrypoints/openai/api_server.py
+++ b/vllm/entrypoints/openai/api_server.py
@@ -4,6 +4,7 @@ import inspect
 import re
 from contextlib import asynccontextmanager
 from http import HTTPStatus
+from typing import Any, Set
 
 import fastapi
 import uvicorn
@@ -33,6 +34,8 @@ openai_serving_chat: OpenAIServingChat
 openai_serving_completion: OpenAIServingCompletion
 
 logger = init_logger(__name__)
 
+_running_tasks: Set[asyncio.Task[Any]] = set()
+
 @asynccontextmanager
 async def lifespan(app: fastapi.FastAPI):
@@ -43,7 +46,9 @@ async def lifespan(app: fastapi.FastAPI):
             await engine.do_log_stats()
 
     if not engine_args.disable_log_stats:
-        asyncio.create_task(_force_log())
+        task = asyncio.create_task(_force_log())
+        _running_tasks.add(task)
+        task.add_done_callback(_running_tasks.remove)
 
     yield
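
Background on the fix: the asyncio event loop keeps only a weak reference to tasks it schedules, so a fire-and-forget `asyncio.create_task(...)` whose return value is discarded can be garbage collected before it finishes, silently stopping the `_force_log` stats loop. The patch keeps a strong reference in a module-level set and drops it when the task completes. Below is a minimal standalone sketch of the same pattern outside of vLLM; the `spawn`, `heartbeat`, and `main` names are illustrative, not part of the patched code.

```python
import asyncio
from typing import Any, Coroutine, Set

# Strong references to in-flight background tasks. Without this, the event
# loop holds only a weak reference to each task, so an otherwise
# un-referenced task may be garbage collected before (or while) it runs.
_running_tasks: Set[asyncio.Task[Any]] = set()


def spawn(coro: Coroutine[Any, Any, Any]) -> asyncio.Task:
    """Schedule a fire-and-forget coroutine without risking GC of its task."""
    task = asyncio.create_task(coro)
    _running_tasks.add(task)                       # keep the task alive
    task.add_done_callback(_running_tasks.remove)  # drop the reference when done
    return task


async def heartbeat(interval: float) -> None:
    # Illustrative stand-in for vLLM's _force_log loop.
    while True:
        await asyncio.sleep(interval)
        print("still alive")


async def main() -> None:
    spawn(heartbeat(1.0))
    await asyncio.sleep(3.5)  # the heartbeat keeps running in the background


if __name__ == "__main__":
    asyncio.run(main())
```

The `add_done_callback(_running_tasks.remove)` step matters as much as the `add`: once the task finishes, the set releases its reference, so long-lived servers do not accumulate completed task objects.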