From 044afa83d1f433cad05baa716c0811a272bb977d Mon Sep 17 00:00:00 2001
From: Kevin Hu
Date: Mon, 9 Dec 2024 14:21:37 +0800
Subject: [PATCH] Fix transformers dependencies for slim. (#3934)

### What problem does this PR solve?

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
---
 rag/llm/cv_model.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/rag/llm/cv_model.py b/rag/llm/cv_model.py
index 48a55b67472..74127b3cb5c 100644
--- a/rag/llm/cv_model.py
+++ b/rag/llm/cv_model.py
@@ -25,7 +25,6 @@
 from io import BytesIO
 import json
 import requests
-from transformers import GenerationConfig
 
 from rag.nlp import is_english
 from api.utils import get_uuid
@@ -510,6 +509,7 @@ def describe(self, image, max_tokens=2048):
         return res.text,res.usage_metadata.total_token_count
 
     def chat(self, system, history, gen_conf, image=""):
+        from transformers import GenerationConfig
         if system:
             history[-1]["content"] = system + history[-1]["content"] + "user query: " + history[-1]["content"]
         try:
@@ -533,6 +533,7 @@ def chat(self, system, history, gen_conf, image=""):
         return "**ERROR**: " + str(e), 0
 
     def chat_streamly(self, system, history, gen_conf, image=""):
+        from transformers import GenerationConfig
         if system:
             history[-1]["content"] = system + history[-1]["content"] + "user query: " + history[-1]["content"]
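
Note (not part of the patch): the change moves the module-level `from transformers import GenerationConfig` into the two methods that use it, which is the standard deferred-import pattern for optional dependencies. Presumably the slim build does not install `transformers`, so the eager import made `rag/llm/cv_model.py` fail to load at all. The sketch below illustrates the pattern only; the function name `build_generation_config` and its parameters are hypothetical, not RAGFlow's API.

```python
# Minimal sketch of the deferred-import pattern applied in this patch.
# Assumption: `transformers` may be absent in the slim build.

def build_generation_config(gen_conf: dict):
    # Deferred import: evaluated only when this function is called, so merely
    # importing this module no longer requires `transformers` to be installed.
    from transformers import GenerationConfig

    return GenerationConfig(
        max_new_tokens=gen_conf.get("max_tokens", 2048),
        temperature=gen_conf.get("temperature", 0.3),
    )
```

The trade-off is that a missing `transformers` package now surfaces as an `ImportError` at call time in `chat`/`chat_streamly` rather than at import time, which is acceptable here because only the code paths that actually need `GenerationConfig` pay that cost.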