From b3f5da8f6ed99c3c87c70ef6a1d7e87481623968 Mon Sep 17 00:00:00 2001
From: nameearly <2741313455@qq.com>
Date: Mon, 30 Mar 2026 09:04:54 +0800
Subject: [PATCH] add cleanup if start() fails when creating a new crawler

---
 deploy/docker/crawler_pool.py | 19 ++++++++++++-------
 1 file changed, 12 insertions(+), 7 deletions(-)

diff --git a/deploy/docker/crawler_pool.py b/deploy/docker/crawler_pool.py
index 516d9562a..4f4ae3e6b 100644
--- a/deploy/docker/crawler_pool.py
+++ b/deploy/docker/crawler_pool.py
@@ -97,7 +97,7 @@ async def get_crawler(cfg: BrowserConfig) -> AsyncWebCrawler:
                 except:
                     pass
 
-                return HOT_POOL[sig]
+                return crawler
 
             logger.info(f"❄️ Using cold pool browser (sig={sig[:8]})")
             return crawler
@@ -111,12 +111,17 @@ async def get_crawler(cfg: BrowserConfig) -> AsyncWebCrawler:
         # Create new in cold pool
         logger.info(f"🆕 Creating new browser in cold pool (sig={sig[:8]}, mem={mem_pct:.1f}%)")
         crawler = AsyncWebCrawler(config=cfg, thread_safe=False)
-        await crawler.start()
-        crawler.active_requests = 1
-        COLD_POOL[sig] = crawler
-        LAST_USED[sig] = time.time()
-        USAGE_COUNT[sig] = 1
-        return crawler
+        try:
+            await crawler.start()
+            crawler.active_requests = 1
+            COLD_POOL[sig] = crawler
+            LAST_USED[sig] = time.time()
+            USAGE_COUNT[sig] = 1
+            return crawler
+        except Exception:
+            # Clean up crawler on start failure to prevent resource leak
+            await crawler.close()
+            raise
 
 async def release_crawler(crawler: AsyncWebCrawler):
     """Decrement active request count for a pooled crawler.
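
Note (reviewer addition, not part of the patch): the second hunk implements a standard "clean up on failed initialization" pattern — the crawler is only registered in the pool and returned once start() has succeeded, and is closed rather than leaked if start() raises. Below is a minimal, self-contained Python sketch of that pattern under stated assumptions: FakeCrawler and create_started are hypothetical stand-ins for AsyncWebCrawler and the pool logic, for illustration only.

import asyncio

class FakeCrawler:
    # Hypothetical stand-in for AsyncWebCrawler; start() always fails
    # here so that the cleanup path is exercised.
    async def start(self):
        raise RuntimeError("browser failed to launch")

    async def close(self):
        print("close() called: browser resources released")

async def create_started(factory):
    # Mirrors the patch: return the crawler only after start() succeeds;
    # on failure, close it instead of leaking it, then re-raise.
    crawler = factory()
    try:
        await crawler.start()
        return crawler
    except Exception:
        await crawler.close()
        raise

async def main():
    try:
        await create_started(FakeCrawler)
    except RuntimeError as exc:
        print(f"start failed, cleanup ran, error re-raised: {exc}")

asyncio.run(main())

Re-raising after close() preserves the caller's error handling while still guaranteeing the half-started browser is torn down.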