Skip to content

Commit

Permalink
Merge pull request xtekky#2372 from hlohaus/info
Browse files Browse the repository at this point in the history
Add Microsoft Copilot provider
  • Loading branch information
hlohaus authored Nov 18, 2024
2 parents 56beb19 + 2fe4316 commit b937115
Show file tree
Hide file tree
Showing 17 changed files with 254 additions and 87 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
> <sup><strong>Stats:</strong></sup> [![Downloads](https://static.pepy.tech/badge/g4f)](https://pepy.tech/project/g4f) [![Downloads](https://static.pepy.tech/badge/g4f/month)](https://pepy.tech/project/g4f)
```sh
pip install -U g4f
pip install -U g4f[all]
```

```sh
Expand Down
1 change: 0 additions & 1 deletion etc/unittest/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,5 @@
from .model import *
from .client import *
from .include import *
from .integration import *

unittest.main()
2 changes: 1 addition & 1 deletion etc/unittest/backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,4 +46,4 @@ def test_search(self):
self.skipTest(e)
except MissingRequirementsError:
self.skipTest("search is not installed")
self.assertEqual(4, len(result))
self.assertTrue(len(result) >= 4)
33 changes: 18 additions & 15 deletions etc/unittest/integration.py
Original file line number Diff line number Diff line change
@@ -1,36 +1,39 @@
import unittest
import json

try:
import nest_asyncio
has_nest_asyncio = True
except ImportError:
has_nest_asyncio = False

from g4f.client import Client, ChatCompletion
from g4f.Provider import Bing, OpenaiChat
from g4f.client import Client, AsyncClient, ChatCompletion
from g4f.Provider import Copilot, DDG

DEFAULT_MESSAGES = [{"role": "system", "content": 'Response in json, Example: {"success": false}'},
{"role": "user", "content": "Say success true in json"}]

class TestProviderIntegration(unittest.TestCase):
def setUp(self):
if not has_nest_asyncio:
self.skipTest("nest_asyncio is not installed")

def test_bing(self):
self.skipTest("Not working")
client = Client(provider=Bing)
client = Client(provider=Copilot)
response = client.chat.completions.create(DEFAULT_MESSAGES, "", response_format={"type": "json_object"})
self.assertIsInstance(response, ChatCompletion)
self.assertIn("success", json.loads(response.choices[0].message.content))

def test_openai(self):
self.skipTest("not working in this network")
client = Client(provider=OpenaiChat)
client = Client(provider=DDG)
response = client.chat.completions.create(DEFAULT_MESSAGES, "", response_format={"type": "json_object"})
self.assertIsInstance(response, ChatCompletion)
self.assertIn("success", json.loads(response.choices[0].message.content))

class TestChatCompletionAsync(unittest.IsolatedAsyncioTestCase):

async def test_bing(self):
client = AsyncClient(provider=Copilot)
response = await client.chat.completions.create(DEFAULT_MESSAGES, "", response_format={"type": "json_object"})
self.assertIsInstance(response, ChatCompletion)
self.assertIn("success", json.loads(response.choices[0].message.content))

async def test_openai(self):
client = AsyncClient(provider=DDG)
response = await client.chat.completions.create(DEFAULT_MESSAGES, "", response_format={"type": "json_object"})
self.assertIsInstance(response, ChatCompletion)
self.assertIn("success", json.loads(response.choices[0].message.content))

if __name__ == '__main__':
unittest.main()
12 changes: 0 additions & 12 deletions etc/unittest/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,18 +7,6 @@

DEFAULT_MESSAGES = [{'role': 'user', 'content': 'Hello'}]

class NoTestChatCompletion(unittest.TestCase):

def no_test_create_default(self):
result = ChatCompletion.create(g4f.models.default, DEFAULT_MESSAGES)
if "Good" not in result and "Hi" not in result:
self.assertIn("Hello", result)

def no_test_bing_provider(self):
provider = g4f.Provider.Bing
result = ChatCompletion.create(g4f.models.default, DEFAULT_MESSAGES, provider)
self.assertIn("Bing", result)

class TestGetLastProvider(unittest.TestCase):

def test_get_last_provider(self):
Expand Down
65 changes: 30 additions & 35 deletions g4f/Provider/Blackbox.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from ..typing import AsyncResult, Messages, ImageType
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..image import ImageResponse, to_data_uri
from .helper import get_random_string

class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
label = "Blackbox AI"
Expand All @@ -22,11 +23,13 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
_last_validated_value = None

default_model = 'blackboxai'
default_vision_model = default_model
default_image_model = 'generate_image'
image_models = [default_image_model, 'repomap']
text_models = [default_model, 'gpt-4o', 'gemini-pro', 'claude-sonnet-3.5', 'blackboxai-pro']
vision_models = [default_model, 'gpt-4o', 'gemini-pro', 'blackboxai-pro']
agentMode = {
'Image Generation': {'mode': True, 'id': "ImageGenerationLV45LJp", 'name': "Image Generation"},
default_image_model: {'mode': True, 'id': "ImageGenerationLV45LJp", 'name': "Image Generation"},
}
trendingAgentMode = {
"gemini-1.5-flash": {'mode': True, 'id': 'Gemini'},
Expand Down Expand Up @@ -111,11 +114,6 @@ async def fetch_validated(cls):

return cls._last_validated_value

@staticmethod
def generate_id(length=7):
characters = string.ascii_letters + string.digits
return ''.join(random.choice(characters) for _ in range(length))

@classmethod
def add_prefix_to_messages(cls, messages: Messages, model: str) -> Messages:
prefix = cls.model_prefixes.get(model, "")
Expand Down Expand Up @@ -143,12 +141,12 @@ async def create_async_generator(
**kwargs
) -> AsyncResult:
model = cls.get_model(model)
message_id = cls.generate_id()
messages_with_prefix = cls.add_prefix_to_messages(messages, model)
message_id = get_random_string(7)
messages = cls.add_prefix_to_messages(messages, model)
validated_value = await cls.fetch_validated()

if image is not None:
messages_with_prefix[-1]['data'] = {
messages[-1]['data'] = {
'fileText': '',
'imageBase64': to_data_uri(image),
'title': image_name
Expand All @@ -171,9 +169,9 @@ async def create_async_generator(
'sec-fetch-site': 'same-origin',
'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36'
}

data = {
"messages": messages_with_prefix,
"messages": messages,
"id": message_id,
"previewToken": None,
"userId": None,
Expand All @@ -200,27 +198,24 @@ async def create_async_generator(
async with ClientSession(headers=headers) as session:
async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
response.raise_for_status()
response_text = await response.text()

if model in cls.image_models:
image_matches = re.findall(r'!\[.*?\]\((https?://[^\)]+)\)', response_text)
if image_matches:
image_url = image_matches[0]
image_response = ImageResponse(images=[image_url], alt="Generated Image")
yield image_response
return

response_text = re.sub(r'Generated by BLACKBOX.AI, try unlimited chat https://www.blackbox.ai', '', response_text, flags=re.DOTALL)

json_match = re.search(r'\$~~~\$(.*?)\$~~~\$', response_text, re.DOTALL)
if json_match:
search_results = json.loads(json_match.group(1))
answer = response_text.split('$~~~$')[-1].strip()

formatted_response = f"{answer}\n\n**Source:**"
for i, result in enumerate(search_results, 1):
formatted_response += f"\n{i}. {result['title']}: {result['link']}"

yield formatted_response
else:
yield response_text.strip()
async for chunk in response.content.iter_any():
text_chunk = chunk.decode(errors="ignore")
if model in cls.image_models:
image_matches = re.findall(r'!\[.*?\]\((https?://[^\)]+)\)', text_chunk)
if image_matches:
image_url = image_matches[0]
image_response = ImageResponse(images=[image_url])
yield image_response
continue

text_chunk = re.sub(r'Generated by BLACKBOX.AI, try unlimited chat https://www.blackbox.ai', '', text_chunk, flags=re.DOTALL)
json_match = re.search(r'\$~~~\$(.*?)\$~~~\$', text_chunk, re.DOTALL)
if json_match:
search_results = json.loads(json_match.group(1))
answer = text_chunk.split('$~~~$')[-1].strip()
formatted_response = f"{answer}\n\n**Source:**"
for i, result in enumerate(search_results, 1):
formatted_response += f"\n{i}. {result['title']}: {result['link']}"
yield formatted_response
else:
yield text_chunk.strip()
156 changes: 156 additions & 0 deletions g4f/Provider/Copilot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
from __future__ import annotations

import json
import asyncio
from http.cookiejar import CookieJar
from urllib.parse import quote

try:
from curl_cffi.requests import Session, CurlWsFlag
has_curl_cffi = True
except ImportError:
has_curl_cffi = False
try:
import nodriver
has_nodriver = True
except ImportError:
has_nodriver = False
try:
from platformdirs import user_config_dir
has_platformdirs = True
except ImportError:
has_platformdirs = False

from .base_provider import AbstractProvider, BaseConversation
from .helper import format_prompt
from ..typing import CreateResult, Messages
from ..errors import MissingRequirementsError
from ..requests.raise_for_status import raise_for_status
from .. import debug

class Conversation(BaseConversation):
    """State needed to resume a Copilot chat.

    Bundles the server-side conversation id, the HTTP session cookies,
    and (when authentication is used) the bearer access token.
    """
    conversation_id: str
    cookie_jar: CookieJar
    access_token: str

    def __init__(self, conversation_id: str, cookie_jar: CookieJar, access_token: str = None):
        # Token may be None for unauthenticated sessions.
        self.access_token = access_token
        self.cookie_jar = cookie_jar
        self.conversation_id = conversation_id

class Copilot(AbstractProvider):
    """Provider for Microsoft Copilot (copilot.microsoft.com).

    Talks to Copilot's websocket chat API via curl_cffi. When authentication
    is required, an access token is harvested from the browser's localStorage
    using nodriver.
    """
    label = "Microsoft Copilot"
    url = "https://copilot.microsoft.com"
    working = True
    supports_stream = True

    websocket_url = "wss://copilot.microsoft.com/c/api/chat?api-version=2"
    conversation_url = f"{url}/c/api/conversations"

    @classmethod
    def create_completion(
        cls,
        model: str,
        messages: Messages,
        stream: bool = False,
        proxy: str = None,
        timeout: int = 900,
        conversation: Conversation = None,
        return_conversation: bool = False,
        **kwargs
    ) -> CreateResult:
        """Stream a Copilot reply as text chunks.

        Creates a new conversation (optionally yielding a ``Conversation``
        object first when ``return_conversation`` is set) or resumes an
        existing one, then relays ``appendText`` websocket events until the
        server signals completion.

        Raises:
            MissingRequirementsError: if ``curl_cffi`` is not installed.
        """
        if not has_curl_cffi:
            raise MissingRequirementsError('Install or update "curl_cffi" package | pip install -U curl_cffi')

        websocket_url = cls.websocket_url
        access_token = None
        headers = None
        cookies = conversation.cookie_jar if conversation is not None else None
        if cls.needs_auth:
            if conversation is None or conversation.access_token is None:
                # Interactive browser login; blocks until a token appears.
                access_token, cookies = asyncio.run(cls.get_access_token_and_cookies(proxy))
            else:
                access_token = conversation.access_token
            # Fix: query parameter was misspelled "acessToken" (and, due to the
            # ws_connect bug below, never actually sent at all).
            websocket_url = f"{websocket_url}&accessToken={quote(access_token)}"
            headers = {"Authorization": f"Bearer {access_token}"}

        with Session(
            timeout=timeout,
            proxy=proxy,
            impersonate="chrome",
            headers=headers,
            cookies=cookies
        ) as session:
            # Priming GET establishes the cookies the chat API expects.
            response = session.get(f"{cls.url}/")
            raise_for_status(response)
            if conversation is None:
                response = session.post(cls.conversation_url)
                raise_for_status(response)
                conversation_id = response.json().get("id")
                if return_conversation:
                    yield Conversation(conversation_id, session.cookies.jar, access_token)
                prompt = format_prompt(messages)
                if debug.logging:
                    print(f"Copilot: Created conversation: {conversation_id}")
            else:
                conversation_id = conversation.conversation_id
                # Resumed conversations only need the latest user message.
                prompt = messages[-1]["content"]
                if debug.logging:
                    print(f"Copilot: Use conversation: {conversation_id}")

            # Fix: connect with the local websocket_url, which carries the
            # access token when auth is enabled — the original used
            # cls.websocket_url and silently dropped the token.
            wss = session.ws_connect(websocket_url)
            wss.send(json.dumps({
                "event": "send",
                "conversationId": conversation_id,
                "content": [{
                    "type": "text",
                    "text": prompt,
                }],
                "mode": "chat"
            }).encode(), CurlWsFlag.TEXT)
            while True:
                try:
                    # recv() returns (payload, flags); treat any receive or
                    # decode failure as end-of-stream.
                    msg = json.loads(wss.recv()[0])
                except Exception:
                    break
                if msg.get("event") == "appendText":
                    yield msg.get("text")
                elif msg.get("event") in ["done", "partCompleted"]:
                    break

    @classmethod
    async def get_access_token_and_cookies(cls, proxy: str = None):
        """Open a nodriver-controlled browser, wait for the user to sign in,
        and return ``(access_token, cookies_dict)`` scraped from the page.

        Raises:
            MissingRequirementsError: if ``nodriver`` is not installed.
        """
        if not has_nodriver:
            raise MissingRequirementsError('Install "nodriver" package | pip install -U nodriver')
        if has_platformdirs:
            # Persist the browser profile so sign-in survives restarts.
            user_data_dir = user_config_dir("g4f-nodriver")
        else:
            user_data_dir = None
        if debug.logging:
            print(f"Copilot: Open nodriver with user_dir: {user_data_dir}")
        browser = await nodriver.start(
            user_data_dir=user_data_dir,
            browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
        )
        page = await browser.get(cls.url)
        while True:
            # Scan localStorage for the MSAL-style entry holding the token.
            access_token = await page.evaluate("""
                (() => {
                    for (var i = 0; i < localStorage.length; i++) {
                        try {
                            item = JSON.parse(localStorage.getItem(localStorage.key(i)));
                            if (item.credentialType == "AccessToken") {
                                return item.secret;
                            }
                        } catch(e) {}
                    }
                })()
            """)
            if access_token:
                break
            # Fix: the coroutine was never awaited, turning this into a busy
            # loop (and emitting a "never awaited" RuntimeWarning).
            await asyncio.sleep(1)
        cookies = {}
        for c in await page.send(nodriver.cdp.network.get_cookies([cls.url])):
            cookies[c.name] = c.value
        await page.close()
        return access_token, cookies
2 changes: 0 additions & 2 deletions g4f/Provider/DeepInfraChat.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,8 @@
import json

from ..typing import AsyncResult, Messages, ImageType
from ..image import to_data_uri
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin


class DeepInfraChat(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://deepinfra.com/chat"
api_endpoint = "https://api.deepinfra.com/v1/openai/chat/completions"
Expand Down
1 change: 1 addition & 0 deletions g4f/Provider/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
from .ChatGpt import ChatGpt
from .ChatGptEs import ChatGptEs
from .Cloudflare import Cloudflare
from .Copilot import Copilot
from .DarkAI import DarkAI
from .DDG import DDG
from .DeepInfraChat import DeepInfraChat
Expand Down
Loading

0 comments on commit b937115

Please sign in to comment.