From 03fd5ac99a828bd2637cf5be43a98157113527fb Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Thu, 28 Mar 2024 11:36:25 +0100
Subject: [PATCH] Fix history support for OpenaiChat

---
 g4f/Provider/needs_auth/OpenaiChat.py | 11 +++++------
 g4f/gui/server/api.py                 | 15 +++++++++------
 g4f/gui/server/backend.py             |  2 +-
 3 files changed, 15 insertions(+), 13 deletions(-)

diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 331d524a2a6..396d73dd67d 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -389,19 +389,17 @@ async def create_async_generator(
                     print(f"{e.__class__.__name__}: {e}")
 
         model = cls.get_model(model).replace("gpt-3.5-turbo", "text-davinci-002-render-sha")
-        fields = Conversation() if conversation is None else copy(conversation)
+        fields = Conversation(conversation_id, parent_id) if conversation is None else copy(conversation)
         fields.finish_reason = None
         while fields.finish_reason is None:
-            conversation_id = fields.conversation_id if hasattr(fields, "conversation_id") else conversation_id
-            parent_id = fields.message_id if hasattr(fields, "message_id") else parent_id
             websocket_request_id = str(uuid.uuid4())
             data = {
                 "action": action,
                 "conversation_mode": {"kind": "primary_assistant"},
                 "force_paragen": False,
                 "force_rate_limit": False,
-                "conversation_id": conversation_id,
-                "parent_message_id": parent_id,
+                "conversation_id": fields.conversation_id,
+                "parent_message_id": fields.message_id,
                 "model": model,
                 "history_and_training_disabled": history_disabled and not auto_continue and not return_conversation,
                 "websocket_request_id": websocket_request_id
@@ -425,6 +423,7 @@ async def create_async_generator(
                 await raise_for_status(response)
                 async for chunk in cls.iter_messages_chunk(response.iter_lines(), session, fields):
                     if return_conversation:
+                        history_disabled = False
                         return_conversation = False
                         yield fields
                     yield chunk
@@ -432,7 +431,7 @@ async def create_async_generator(
                 break
             action = "continue"
             await asyncio.sleep(5)
-        if history_disabled and auto_continue and not return_conversation:
+        if history_disabled and auto_continue:
             await cls.delete_conversation(session, cls._headers, fields.conversation_id)
 
     @staticmethod
diff --git a/g4f/gui/server/api.py b/g4f/gui/server/api.py
index da934d57bb6..b4e2b3d403a 100644
--- a/g4f/gui/server/api.py
+++ b/g4f/gui/server/api.py
@@ -41,7 +41,7 @@ from g4f.Provider.bing.create_images import patch_provider
 from g4f.providers.conversation import BaseConversation
 
-conversations: dict[str, BaseConversation] = {}
+conversations: dict[str, dict[str, BaseConversation]] = {}
 
 class Api():
@@ -106,7 +106,8 @@ def get_conversation(self, options: dict, **kwargs) -> Iterator:
             kwargs["image"] = open(self.image, "rb")
         for message in self._create_response_stream(
             self._prepare_conversation_kwargs(options, kwargs),
-            options.get("conversation_id")
+            options.get("conversation_id"),
+            options.get('provider')
         ):
             if not window.evaluate_js(f"if (!this.abort) this.add_message_chunk({json.dumps(message)}); !this.abort && !this.error;"):
                 break
@@ -193,8 +194,8 @@ def _prepare_conversation_kwargs(self, json_data: dict, kwargs: dict):
             messages[-1]["content"] = get_search_message(messages[-1]["content"])
 
         conversation_id = json_data.get("conversation_id")
-        if conversation_id and conversation_id in conversations:
-            kwargs["conversation"] = conversations[conversation_id]
+        if conversation_id and provider in conversations and conversation_id in conversations[provider]:
kwargs["conversation"] = conversations[provider][conversation_id] model = json_data.get('model') model = model if model else models.default @@ -211,7 +212,7 @@ def _prepare_conversation_kwargs(self, json_data: dict, kwargs: dict): **kwargs } - def _create_response_stream(self, kwargs, conversation_id: str) -> Iterator: + def _create_response_stream(self, kwargs: dict, conversation_id: str, provider: str) -> Iterator: """ Creates and returns a streaming response for the conversation. @@ -231,7 +232,9 @@ def _create_response_stream(self, kwargs, conversation_id: str) -> Iterator: first = False yield self._format_json("provider", get_last_provider(True)) if isinstance(chunk, BaseConversation): - conversations[conversation_id] = chunk + if provider not in conversations: + conversations[provider] = {} + conversations[provider][conversation_id] = chunk yield self._format_json("conversation", conversation_id) elif isinstance(chunk, Exception): logging.exception(chunk) diff --git a/g4f/gui/server/backend.py b/g4f/gui/server/backend.py index fb8404d46ab..d30b97d9fa2 100644 --- a/g4f/gui/server/backend.py +++ b/g4f/gui/server/backend.py @@ -85,7 +85,7 @@ def handle_conversation(self): kwargs = self._prepare_conversation_kwargs(json_data, kwargs) return self.app.response_class( - self._create_response_stream(kwargs, json_data.get("conversation_id")), + self._create_response_stream(kwargs, json_data.get("conversation_id"), json_data.get("provider")), mimetype='text/event-stream' )