Skip to content

Commit

Permalink
Update (g4f/models.py g4f/Provider/ChatGpt.py)
Browse files · Browse the repository at this point in the history
  • Loading branch information
kqlio67 committed Nov 4, 2024
1 parent bb73a2f commit ade7a2f
Show file tree
Hide file tree
Showing 2 changed files with 63 additions and 55 deletions.
110 changes: 59 additions & 51 deletions g4f/Provider/ChatGpt.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@
from ..typing import Messages, CreateResult
from ..providers.base_provider import AbstractProvider, ProviderModelMixin

import time, uuid, random, json
import time
import uuid
import random
import json
from requests import Session

from .openai.new import (
Expand Down Expand Up @@ -72,17 +75,34 @@ def init_session(user_agent):

class ChatGpt(AbstractProvider, ProviderModelMixin):
label = "ChatGpt"
url = "https://chatgpt.com"
working = True
supports_message_history = True
supports_system_message = True
supports_stream = True
default_model = 'auto'
models = [
default_model,
'gpt-3.5-turbo',
'gpt-4o',
'gpt-4o-mini',
'gpt-4',
'gpt-4-turbo',
'chatgpt-4o-latest',
]

model_aliases = {
"gpt-4o": "chatgpt-4o-latest",
}

@classmethod
def get_model(cls, model: str) -> str:
    """Resolve a requested model name to one the provider supports.

    A name already listed in ``cls.models`` passes through unchanged;
    a known alias is mapped to its canonical name via
    ``cls.model_aliases``; anything else falls back to
    ``cls.default_model``.
    """
    if model in cls.models:
        return model
    # Alias lookup with the default model as the catch-all fallback.
    return cls.model_aliases.get(model, cls.default_model)

@classmethod
def create_completion(
Expand All @@ -92,30 +112,17 @@ def create_completion(
stream: bool,
**kwargs
) -> CreateResult:
model = cls.get_model(model)
if model not in cls.models:
raise ValueError(f"Model '{model}' is not available. Available models: {', '.join(cls.models)}")


if model in [
'gpt-4o',
'gpt-4o-mini',
'gpt-4',
'gpt-4-turbo',
'chatgpt-4o-latest'
]:
model = 'auto'

elif model in [
'gpt-3.5-turbo'
]:
model = 'text-davinci-002-render-sha'

else:
raise ValueError(f"Invalid model: {model}")

user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36'
user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36'
session: Session = init_session(user_agent)

config = get_config(user_agent)
pow_req = get_requirements_token(config)
headers = {
config = get_config(user_agent)
pow_req = get_requirements_token(config)
headers = {
'accept': '*/*',
'accept-language': 'en-US,en;q=0.8',
'content-type': 'application/json',
Expand All @@ -134,39 +141,35 @@ def create_completion(
}

response = session.post('https://chatgpt.com/backend-anon/sentinel/chat-requirements',
headers=headers, json={'p': pow_req})
headers=headers, json={'p': pow_req})

if response.status_code != 200:
print(f"Request failed with status: {response.status_code}")
print(f"Response content: {response.content}")
return

response_data = response.json()
if "detail" in response_data and "Unusual activity" in response_data["detail"]:
print(f"Blocked due to unusual activity: {response_data['detail']}")
return

turnstile = response_data.get('turnstile', {})
turnstile = response_data.get('turnstile', {})
turnstile_required = turnstile.get('required')
pow_conf = response_data.get('proofofwork', {})
pow_conf = response_data.get('proofofwork', {})

if turnstile_required:
turnstile_dx = turnstile.get('dx')
turnstile_dx = turnstile.get('dx')
turnstile_token = process_turnstile(turnstile_dx, pow_req)

headers = headers | {
'openai-sentinel-turnstile-token' : turnstile_token,
'openai-sentinel-chat-requirements-token': response_data.get('token'),
'openai-sentinel-proof-token' : get_answer_token(
pow_conf.get('seed'), pow_conf.get('difficulty'), config
)
}

headers = {**headers,
'openai-sentinel-turnstile-token': turnstile_token,
'openai-sentinel-chat-requirements-token': response_data.get('token'),
'openai-sentinel-proof-token': get_answer_token(
pow_conf.get('seed'), pow_conf.get('difficulty'), config
)}

json_data = {
'action': 'next',
'messages': format_conversation(messages),
'parent_message_id': str(uuid.uuid4()),
'model': 'auto',
'model': model,
'timezone_offset_min': -120,
'suggestions': [
'Can you help me create a personalized morning routine that would help increase my productivity throughout the day? Start by asking me about my current habits and what activities energize me in the morning.',
Expand All @@ -189,7 +192,7 @@ def create_completion(
'conversation_origin': None,
'client_contextual_info': {
'is_dark_mode': True,
'time_since_loaded': random.randint(22,33),
'time_since_loaded': random.randint(22, 33),
'page_height': random.randint(600, 900),
'page_width': random.randint(500, 800),
'pixel_ratio': 2,
Expand All @@ -201,25 +204,30 @@ def create_completion(
time.sleep(2)

response = session.post('https://chatgpt.com/backend-anon/conversation',
headers=headers, json=json_data, stream=True)
headers=headers, json=json_data, stream=True)

replace = ''
for line in response.iter_lines():
if line:
decoded_line = line.decode()
print(f"Received line: {decoded_line}")
print(decoded_line)

if decoded_line.startswith('data:'):
json_string = decoded_line[6:]
if json_string.strip():
json_string = decoded_line[6:].strip()

if json_string == '[DONE]':
break

if json_string:
try:
data = json.loads(json_string)
except json.JSONDecodeError as e:
print(f"Error decoding JSON: {e}, content: {json_string}")
except json.JSONDecodeError:
continue

if data.get('message').get('author').get('role') == 'assistant':
tokens = (data.get('message').get('content').get('parts')[0])

yield tokens.replace(replace, '')

replace = tokens
if data.get('message') and data['message'].get('author'):
role = data['message']['author'].get('role')
if role == 'assistant':
tokens = data['message']['content'].get('parts', [])
if tokens:
yield tokens[0].replace(replace, '')
replace = tokens[0]
8 changes: 4 additions & 4 deletions g4f/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,25 +137,25 @@ def __all__() -> list[str]:
gpt_4o = Model(
name = 'gpt-4o',
base_provider = 'OpenAI',
best_provider = IterListProvider([Blackbox, ChatGptEs, DarkAI, Editee, NexraChatGPT, Airforce, Liaobots, OpenaiChat])
best_provider = IterListProvider([Blackbox, ChatGptEs, DarkAI, Editee, NexraChatGPT, Airforce, ChatGpt, Liaobots, OpenaiChat])
)

gpt_4o_mini = Model(
name = 'gpt-4o-mini',
base_provider = 'OpenAI',
best_provider = IterListProvider([DDG, ChatGptEs, FreeNetfly, Pizzagpt, MagickPen, RubiksAI, Liaobots, Airforce, ChatgptFree, Koala, OpenaiChat, ChatGpt])
best_provider = IterListProvider([DDG, ChatGptEs, FreeNetfly, Pizzagpt, MagickPen, RubiksAI, Liaobots, ChatGpt, Airforce, ChatgptFree, Koala, OpenaiChat])
)

gpt_4_turbo = Model(
name = 'gpt-4-turbo',
base_provider = 'OpenAI',
best_provider = IterListProvider([Liaobots, Airforce, Bing])
best_provider = IterListProvider([Liaobots, Airforce, ChatGpt, Bing])
)

gpt_4 = Model(
name = 'gpt-4',
base_provider = 'OpenAI',
best_provider = IterListProvider([Chatgpt4Online, Ai4Chat, NexraBing, NexraChatGPT, Airforce, Bing, OpenaiChat, gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider])
best_provider = IterListProvider([Chatgpt4Online, Ai4Chat, NexraBing, NexraChatGPT, ChatGpt, Airforce, Bing, OpenaiChat, gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider])
)

# o1
Expand Down

0 comments on commit ade7a2f

Please sign in to comment.