Skip to content

Commit

Permalink
Merge branch 'xtekky:main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
kqlio67 authored Nov 29, 2024
2 parents f3f0a84 + 8d5d522 commit 10ca906
Show file tree
Hide file tree
Showing 9 changed files with 173 additions and 93 deletions.
10 changes: 7 additions & 3 deletions docker/Dockerfile-armv7
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,9 @@ ENV G4F_USER_ID $G4F_USER_ID
ENV G4F_DIR /app

RUN apt-get update && apt-get upgrade -y \
&& apt-get install -y git \
&& apt-get install -y git curl \
&& apt-get install --quiet --yes --no-install-recommends \
build-essential libffi-dev zlib1g-dev libjpeg-dev \
build-essential libffi-dev zlib1g-dev libjpeg-dev libssl-dev pkg-config \
# Add user and user group
&& groupadd -g $G4F_USER_ID $G4F_USER \
&& useradd -rm -G sudo -u $G4F_USER_ID -g $G4F_USER_ID $G4F_USER \
Expand All @@ -26,7 +26,10 @@ USER $G4F_USER_ID
WORKDIR $G4F_DIR

ENV HOME /home/$G4F_USER
ENV PATH "${HOME}/.local/bin:${PATH}"
ENV PATH "${HOME}/.local/bin:${HOME}/.cargo/bin:${PATH}"

# Install rust toolchain
RUN curl https://sh.rustup.rs -sSf | bash -s -- -y

# Create app dir and copy the project's requirements file into it
RUN mkdir -p $G4F_DIR
Expand Down Expand Up @@ -56,6 +59,7 @@ RUN pip uninstall --yes \
USER root

# Clean up build deps
RUN rm --recursive --force "${HOME}/.rustup"
RUN apt-get purge --auto-remove --yes \
build-essential \
&& apt-get clean \
Expand Down
3 changes: 2 additions & 1 deletion etc/unittest/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,5 +6,6 @@
from .model import *
from .client import *
from .include import *
from .retry_provider import *

unittest.main()
unittest.main()
32 changes: 30 additions & 2 deletions etc/unittest/mocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,37 @@ def create_completion(

class YieldProviderMock(AsyncGeneratorProvider):
    """Test double that streams each incoming message's content back verbatim."""
    # Marked working so provider-selection logic does not filter this mock out.
    working = True

    # NOTE: intentionally defined without @classmethod/cls — the framework
    # invokes this through the class attribute, so the function is called
    # unbound and `model` receives the model name positionally.
    async def create_async_generator(
        model, messages, stream, **kwargs
    ):
        # Yield each message's content exactly once. (The scraped diff showed
        # the yield line twice — old/new sides of an EOF-newline change —
        # which, taken literally, would emit every message twice.)
        for message in messages:
            yield message["content"]

class RaiseExceptionProviderMock(AbstractProvider):
    """Test double whose completion always fails with RuntimeError."""
    # Marked working so provider-selection logic does not filter this mock out.
    working = True

    @classmethod
    def create_completion(
        cls, model, messages, stream, **kwargs
    ):
        raise RuntimeError(cls.__name__)
        # Unreachable on purpose: the bare `yield` below makes this a
        # generator function, so the RuntimeError is raised when the caller
        # starts iterating the result — not at call time. This mirrors how
        # real streaming providers fail mid-stream.
        yield cls.__name__

class AsyncRaiseExceptionProviderMock(AsyncGeneratorProvider):
    """Async test double whose generator always fails with RuntimeError."""
    # Marked working so provider-selection logic does not filter this mock out.
    working = True

    @classmethod
    async def create_async_generator(
        cls, model, messages, stream, **kwargs
    ):
        raise RuntimeError(cls.__name__)
        # Unreachable on purpose: the `yield` makes this an async generator,
        # so the RuntimeError surfaces during `async for` iteration rather
        # than when the coroutine/generator is created.
        yield cls.__name__

class YieldNoneProviderMock(AsyncGeneratorProvider):
    """Test double that emits a single None chunk.

    Used (see TestIterListProvider.test_skip_none) to verify that consumers
    skip falsy/None chunks and fall through to the next provider instead of
    treating None as real content.
    """
    # Marked working so provider-selection logic does not filter this mock out.
    working = True

    # NOTE: intentionally defined without @classmethod/cls — invoked through
    # the class attribute, so the function is called unbound.
    async def create_async_generator(
        model, messages, stream, **kwargs
    ):
        yield None
60 changes: 60 additions & 0 deletions etc/unittest/retry_provider.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
from __future__ import annotations

import unittest

from g4f.client import AsyncClient, ChatCompletion, ChatCompletionChunk
from g4f.providers.retry_provider import IterListProvider
from .mocks import YieldProviderMock, RaiseExceptionProviderMock, AsyncRaiseExceptionProviderMock, YieldNoneProviderMock

DEFAULT_MESSAGES = [{'role': 'user', 'content': 'Hello'}]

class TestIterListProvider(unittest.IsolatedAsyncioTestCase):
    """Tests for IterListProvider's provider-fallback behaviour.

    The second IterListProvider argument (False) presumably disables
    provider shuffling so the iteration order is deterministic — TODO
    confirm against IterListProvider's signature.
    """

    async def test_skip_provider(self):
        # First provider raises synchronously; the retry logic should skip it
        # and return the second provider's echoed "Hello".
        client = AsyncClient(provider=IterListProvider([RaiseExceptionProviderMock, YieldProviderMock], False))
        response = await client.chat.completions.create(DEFAULT_MESSAGES, "")
        self.assertIsInstance(response, ChatCompletion)
        self.assertEqual("Hello", response.choices[0].message.content)

    async def test_only_one_result(self):
        # Two working providers: only the first one's result should be used,
        # i.e. the content is not duplicated.
        client = AsyncClient(provider=IterListProvider([YieldProviderMock, YieldProviderMock]))
        response = await client.chat.completions.create(DEFAULT_MESSAGES, "")
        self.assertIsInstance(response, ChatCompletion)
        self.assertEqual("Hello", response.choices[0].message.content)

    async def test_stream_skip_provider(self):
        # Async-failing first provider must be skipped in streaming mode too;
        # every surviving chunk with content must carry a string delta.
        client = AsyncClient(provider=IterListProvider([AsyncRaiseExceptionProviderMock, YieldProviderMock], False))
        messages = [{'role': 'user', 'content': chunk} for chunk in ["How ", "are ", "you", "?"]]
        response = client.chat.completions.create(messages, "Hello", stream=True)
        async for chunk in response:
            chunk: ChatCompletionChunk = chunk
            self.assertIsInstance(chunk, ChatCompletionChunk)
            if chunk.choices[0].delta.content is not None:
                self.assertIsInstance(chunk.choices[0].delta.content, str)

    async def test_stream_only_one_result(self):
        # With max_tokens=2, expect two content chunks plus one trailing
        # chunk (presumably the finish chunk with content=None — verify
        # against the client's streaming implementation), hence length 3.
        client = AsyncClient(provider=IterListProvider([YieldProviderMock, YieldProviderMock], False))
        messages = [{'role': 'user', 'content': chunk} for chunk in ["You ", "You "]]
        response = client.chat.completions.create(messages, "Hello", stream=True, max_tokens=2)
        response_list = []
        async for chunk in response:
            response_list.append(chunk)
        self.assertEqual(len(response_list), 3)
        for chunk in response_list:
            if chunk.choices[0].delta.content is not None:
                self.assertEqual(chunk.choices[0].delta.content, "You ")

    async def test_skip_none(self):
        # A provider that yields only None must be treated as a failure and
        # skipped in favour of the next provider.
        client = AsyncClient(provider=IterListProvider([YieldNoneProviderMock, YieldProviderMock], False))
        response = await client.chat.completions.create(DEFAULT_MESSAGES, "")
        self.assertIsInstance(response, ChatCompletion)
        self.assertEqual("Hello", response.choices[0].message.content)

    async def test_stream_skip_none(self):
        # Streaming variant of test_skip_none: the None-yielding provider is
        # skipped and the fallback's "Hello" arrives as one content chunk
        # (plus one trailing chunk — hence length 2).
        client = AsyncClient(provider=IterListProvider([YieldNoneProviderMock, YieldProviderMock], False))
        response = client.chat.completions.create(DEFAULT_MESSAGES, "", stream=True)
        response_list = [chunk async for chunk in response]
        self.assertEqual(len(response_list), 2)
        for chunk in response_list:
            if chunk.choices[0].delta.content is not None:
                self.assertEqual(chunk.choices[0].delta.content, "Hello")
6 changes: 5 additions & 1 deletion g4f/Provider/needs_auth/OpenaiChat.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
supports_system_message = True
default_model = "auto"
default_vision_model = "gpt-4o"
fallback_models = [default_model, "gpt-4", "gpt-4o", "gpt-4o-mini", "gpt-4o-canmore", "o1-preview", "o1-mini"]
default_image_model = "dall-e-3"
fallback_models = [default_model, "gpt-4", "gpt-4o", "gpt-4o-mini", "gpt-4o-canmore", "o1-preview", "o1-mini", default_image_model]
vision_models = fallback_models
image_models = fallback_models
synthesize_content_type = "audio/mpeg"
Expand All @@ -76,6 +77,7 @@ def get_models(cls):
response.raise_for_status()
data = response.json()
cls.models = [model.get("slug") for model in data.get("models")]
cls.models.append(cls.default_image_model)
except Exception:
cls.models = cls.fallback_models
return cls.models
Expand Down Expand Up @@ -270,6 +272,8 @@ async def create_async_generator(
Raises:
RuntimeError: If an error occurs during processing.
"""
if model == cls.default_image_model:
model = cls.default_model
await cls.login(proxy)

async with StreamSession(
Expand Down
2 changes: 1 addition & 1 deletion g4f/client/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -474,7 +474,7 @@ def create(
**kwargs
)

if not isinstance(response, AsyncIterator):
if not hasattr(response, "__aiter__"):
response = to_async_iterator(response)
response = async_iter_response(response, stream, response_format, max_tokens, stop)
response = async_iter_append_model_and_provider(response)
Expand Down
4 changes: 2 additions & 2 deletions g4f/client/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,14 @@
from ..models import Model, ModelUtils, default
from ..Provider import ProviderUtils
from ..providers.types import BaseRetryProvider, ProviderType
from ..providers.retry_provider import IterProvider
from ..providers.retry_provider import IterListProvider

def convert_to_provider(provider: str) -> ProviderType:
if " " in provider:
provider_list = [ProviderUtils.convert[p] for p in provider.split() if p in ProviderUtils.convert]
if not provider_list:
raise ProviderNotFoundError(f'Providers not found: {provider}')
provider = IterProvider(provider_list)
provider = IterListProvider(provider_list, False)
elif provider in ProviderUtils.convert:
provider = ProviderUtils.convert[provider]
elif provider:
Expand Down
6 changes: 4 additions & 2 deletions g4f/providers/base_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,9 @@ async def create_async(
loop = loop or asyncio.get_running_loop()

def create_func() -> str:
return "".join(cls.create_completion(model, messages, False, **kwargs))
chunks = [str(chunk) for chunk in cls.create_completion(model, messages, False, **kwargs) if chunk]
if chunks:
return "".join(chunks)

return await asyncio.wait_for(
loop.run_in_executor(executor, create_func),
Expand Down Expand Up @@ -205,7 +207,7 @@ async def create_async(
"""
return "".join([
str(chunk) async for chunk in cls.create_async_generator(model, messages, stream=False, **kwargs)
if not isinstance(chunk, (Exception, FinishReason, BaseConversation, SynthesizeData))
if chunk and not isinstance(chunk, (Exception, FinishReason, BaseConversation, SynthesizeData))
])

@staticmethod
Expand Down
Loading

0 comments on commit 10ca906

Please sign in to comment.