Skip to content

Commit

Permalink
Add more contributors, add link to Swagger UI (xtekky#2443)
Browse files Browse the repository at this point in the history
* Add more contributors, add link to Swagger UI
* Update Dockerfile-slim
* Update retry_provider.py
* Add html preview to gui, fix urls in website manifest
* Missing chunks in OpenaiChat
  • Loading branch information
hlohaus authored Nov 30, 2024
1 parent 79c407b commit 3f93d34
Show file tree
Hide file tree
Showing 16 changed files with 216 additions and 201 deletions.
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

### Please, follow these steps to contribute:
1. Reverse a website from this list: [sites-to-reverse](https://github.com/xtekky/gpt4free/issues/40)
2. Add it to [./testing](https://github.com/xtekky/gpt4free/tree/main/testing)
2. Add it to [./etc/unittest/](https://github.com/xtekky/gpt4free/tree/main/etc/unittest/)
3. Refactor it and add it to [./g4f](https://github.com/xtekky/gpt4free/tree/main/g4f)

### We will be grateful to see you as a contributor!
100 changes: 89 additions & 11 deletions README.md

Large diffs are not rendered by default.

5 changes: 1 addition & 4 deletions docker/Dockerfile-slim
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@ RUN apt-get update && apt-get upgrade -y \
# Add user and user group
&& groupadd -g $G4F_USER_ID $G4F_USER \
&& useradd -rm -G sudo -u $G4F_USER_ID -g $G4F_USER_ID $G4F_USER \
&& mkdir -p /var/log/supervisor \
&& chown "${G4F_USER_ID}:${G4F_USER_ID}" /var/log/supervisor \
&& echo "${G4F_USER}:${G4F_USER}" | chpasswd \
&& python -m pip install --upgrade pip \
&& apt-get clean \
Expand All @@ -32,8 +30,7 @@ RUN mkdir -p $G4F_DIR
COPY requirements-slim.txt $G4F_DIR

# Upgrade pip for the latest features and install the project's Python dependencies.
RUN pip install --no-cache-dir -r requirements-slim.txt \
&& pip install --no-cache-dir duckduckgo-search>=5.0
RUN pip install --no-cache-dir -r requirements-slim.txt

# Copy the entire package into the container.
ADD --chown=$G4F_USER:$G4F_USER g4f $G4F_DIR/g4f
7 changes: 5 additions & 2 deletions docs/client.md
Original file line number Diff line number Diff line change
Expand Up @@ -185,12 +185,15 @@ print(base64_text)
**Create variations of an existing image:**
```python
from g4f.client import Client
from g4f.Provider import OpenaiChat

client = Client()
client = Client(
image_provider=OpenaiChat
)

response = client.images.create_variation(
image=open("cat.jpg", "rb"),
model="bing"
model="dall-e-3",
# Add any other necessary parameters
)

Expand Down
2 changes: 1 addition & 1 deletion docs/legacy.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import g4f

g4f.debug.logging = True # Enable debug logging
g4f.debug.version_check = False # Disable automatic version checking
print(g4f.Provider.Gemini.params) # Print supported args for Bing
print(g4f.Provider.Gemini.params) # Print supported args for Gemini

# Using automatic a provider for the given model
## Streamed completion
Expand Down
2 changes: 1 addition & 1 deletion etc/tool/contributers.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import requests

url = "https://api.github.com/repos/xtekky/gpt4free/contributors"
url = "https://api.github.com/repos/xtekky/gpt4free/contributors?per_page=100"

for user in requests.get(url).json():
print(f'<a href="https://github.com/{user["login"]}" target="_blank"><img src="{user["avatar_url"]}&s=45" width="45" title="{user["login"]}"></a>')
2 changes: 1 addition & 1 deletion etc/tool/copilot.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ def analyze_code(pull: PullRequest, diff: str)-> list[dict]:
else:
changed_lines.append(f"{offset_line}:{line}")
offset_line += 1

return comments

def create_analyze_prompt(changed_lines: list[str], pull: PullRequest, file_path: str):
Expand Down
33 changes: 0 additions & 33 deletions etc/tool/provider_init.py

This file was deleted.

4 changes: 2 additions & 2 deletions g4f/Provider/You.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,9 +139,9 @@ async def create_async_generator(
else:
yield ImageResponse(match.group(2), match.group(1))
else:
yield data["t"]
yield data["t"]
else:
yield data["t"]
yield data["t"]

@classmethod
async def upload_file(cls, client: StreamSession, cookies: Cookies, file: bytes, filename: str = None) -> dict:
Expand Down
14 changes: 9 additions & 5 deletions g4f/Provider/needs_auth/OpenaiChat.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,10 +128,13 @@ async def upload_image(
data=data_bytes,
headers={
"Content-Type": image_data["mime_type"],
"x-ms-blob-type": "BlockBlob"
"x-ms-blob-type": "BlockBlob",
"x-ms-version": "2020-04-08",
"Origin": "https://chatgpt.com",
"Referer": "https://chatgpt.com/",
}
) as response:
await raise_for_status(response, "Send file failed")
await raise_for_status(response)
# Post the file ID to the service and get the download URL
async with session.post(
f"{cls.url}/backend-api/files/{image_data['file_id']}/uploaded",
Expand Down Expand Up @@ -162,7 +165,7 @@ def create_messages(cls, messages: Messages, image_request: ImageRequest = None,
"id": str(uuid.uuid4()),
"create_time": int(time.time()),
"id": str(uuid.uuid4()),
"metadata": {"serialization_metadata": {"custom_symbol_offsets": []}, "system_hints": system_hints},
"metadata": {"serialization_metadata": {"custom_symbol_offsets": []}, "system_hints": system_hints},
} for message in messages]

# Check if there is an image response
Expand Down Expand Up @@ -407,7 +410,8 @@ async def iter_messages_line(cls, session: StreamSession, line: bytes, fields: C
if isinstance(line, dict) and "v" in line:
v = line.get("v")
if isinstance(v, str) and fields.is_recipient:
yield v
if "p" not in line or line.get("p") == "/message/content/parts/0":
yield v
elif isinstance(v, list) and fields.is_recipient:
for m in v:
if m.get("p") == "/message/content/parts/0":
Expand All @@ -420,7 +424,7 @@ async def iter_messages_line(cls, session: StreamSession, line: bytes, fields: C
fields.conversation_id = v.get("conversation_id")
debug.log(f"OpenaiChat: New conversation: {fields.conversation_id}")
m = v.get("message", {})
fields.is_recipient = m.get("recipient") == "all"
fields.is_recipient = m.get("recipient", "all") == "all"
if fields.is_recipient:
c = m.get("content", {})
if c.get("content_type") == "multimodal_text":
Expand Down
20 changes: 2 additions & 18 deletions g4f/cookies.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ def load_cookies_from_browsers(domain_name: str, raise_requirements_error: bool
raise MissingRequirementsError('Install "browser_cookie3" package')
return {}
cookies = {}
for cookie_fn in [_g4f, chrome, chromium, opera, opera_gx, brave, edge, vivaldi, firefox]:
for cookie_fn in browsers:
try:
cookie_jar = cookie_fn(domain_name=domain_name)
if len(cookie_jar) and debug.logging:
Expand Down Expand Up @@ -188,20 +188,4 @@ def get_domain(v: dict) -> str:
for domain, new_values in new_cookies.items():
if debug.logging:
print(f"Cookies added: {len(new_values)} from {domain}")
CookiesConfig.cookies[domain] = new_values

def _g4f(domain_name: str) -> list:
"""
Load cookies from the 'g4f' browser (if exists).
Args:
domain_name (str): The domain for which to load cookies.
Returns:
list: List of cookies.
"""
if not has_platformdirs:
return []
user_data_dir = user_config_dir("g4f")
cookie_file = os.path.join(user_data_dir, "Default", "Cookies")
return [] if not os.path.exists(cookie_file) else chrome(cookie_file, domain_name)
CookiesConfig.cookies[domain] = new_values
41 changes: 41 additions & 0 deletions g4f/gui/client/static/css/style.css
Original file line number Diff line number Diff line change
Expand Up @@ -1064,6 +1064,47 @@ a:-webkit-any-link {
width: 1px;
}

.hljs-iframe-button, .hljs-iframe-close {
position: absolute;
bottom: 1rem;
right: 1rem;
padding: 7px;
border-radius: .25rem;
border: 1px solid #ffffff22;
background-color: #2d2b57;
color: #fff;
cursor: pointer;
width: 32px;
height: 32px;
}

.hljs-iframe-button:hover, .hljs-iframe-close:hover {
border-color: #ffffff44;
color: #ffffff77;
}

.hljs-iframe-container {
position: fixed;
position: absolute;
left: 0;
width: 100%;
height: 100%;
z-index: 1000001;
background-color: #fff;
padding: 0;
margin: 0;
overflow: hidden;
}

.hljs-iframe {
width: 100%;
height: 100%;
padding: 0;
margin: 0;
border: none;
overflow: auto;
}

.white {
--blur-bg: transparent;
--accent: #007bff;
Expand Down
4 changes: 2 additions & 2 deletions g4f/gui/client/static/img/site.webmanifest
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,12 @@
"short_name": "",
"icons": [
{
"src": "/assets/img/android-chrome-192x192.png",
"src": "/static/img/android-chrome-192x192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "/assets/img/android-chrome-512x512.png",
"src": "/static/img/android-chrome-512x512.png",
"sizes": "512x512",
"type": "image/png"
}
Expand Down
73 changes: 60 additions & 13 deletions g4f/gui/client/static/js/chat.v1.js
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,9 @@ appStorage = window.localStorage || {
removeItem: (key) => delete self[key],
length: 0
}

appStorage.getItem("darkMode") == "false" ? document.body.classList.add("white") : null;

let markdown_render = () => null;
if (window.markdownit) {
const markdown = window.markdownit();
Expand All @@ -56,6 +59,7 @@ if (window.markdownit) {
.replaceAll('<code>', '<code class="language-plaintext">')
}
}

function filter_message(text) {
return text.replaceAll(
/<!-- generated images start -->[\s\S]+<!-- generated images end -->/gm, ""
Expand All @@ -81,7 +85,52 @@ function fallback_clipboard (text) {
document.body.removeChild(textBox);
}

const iframe_container = Object.assign(document.createElement("div"), {
className: "hljs-iframe-container hidden",
});
const iframe = Object.assign(document.createElement("iframe"), {
className: "hljs-iframe",
});
iframe_container.appendChild(iframe);
const iframe_close = Object.assign(document.createElement("button"), {
className: "hljs-iframe-close",
innerHTML: '<i class="fa-regular fa-x"></i>',
});
iframe_close.onclick = () => iframe_container.classList.add("hidden");
iframe_container.appendChild(iframe_close);
chat.appendChild(iframe_container);

class HtmlRenderPlugin {
constructor(options = {}) {
self.hook = options.hook;
self.callback = options.callback
}
"after:highlightElement"({
el,
text
}) {
if (!el.classList.contains("language-html")) {
return;
}
let button = Object.assign(document.createElement("button"), {
innerHTML: '<i class="fa-regular fa-folder-open"></i>',
className: "hljs-iframe-button",
});
el.parentElement.appendChild(button);
button.onclick = async () => {
let newText = text;
if (hook && typeof hook === "function") {
newText = hook(text, el) || text
}
iframe.src = `data:text/html;charset=utf-8,${encodeURIComponent(newText)}`;
iframe_container.classList.remove("hidden");
if (typeof callback === "function") return callback(newText, el);
}
}
}

hljs.addPlugin(new CopyButtonPlugin());
hljs.addPlugin(new HtmlRenderPlugin())
let typesetPromise = Promise.resolve();
const highlight = (container) => {
container.querySelectorAll('code:not(.hljs').forEach((el) => {
Expand Down Expand Up @@ -371,16 +420,17 @@ document.querySelector(".media_player .fa-x").addEventListener("click", ()=>{
});

const prepare_messages = (messages, message_index = -1) => {
if (message_index >= 0) {
messages = messages.filter((_, index) => message_index >= index);
}

// Removes none user messages at end
let last_message;
while (last_message = messages.pop()) {
if (last_message["role"] == "user") {
messages.push(last_message);
break;
if (message_index != null) {
if (message_index >= 0) {
messages = messages.filter((_, index) => message_index >= index);
}
// Removes none user messages at end
let last_message;
while (last_message = messages.pop()) {
if (last_message["role"] == "user") {
messages.push(last_message);
break;
}
}
}

Expand Down Expand Up @@ -1313,9 +1363,6 @@ async function on_api() {
}
const darkMode = document.getElementById("darkMode");
if (darkMode) {
if (!darkMode.checked) {
document.body.classList.add("white");
}
darkMode.addEventListener('change', async (event) => {
if (event.target.checked) {
document.body.classList.remove("white");
Expand Down
1 change: 1 addition & 0 deletions g4f/providers/retry_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,6 +272,7 @@ async def create_async_generator(
timeout=kwargs.get("timeout", DEFAULT_TIMEOUT),
)
if chunk:
yield chunk
started = True
elif hasattr(provider, "create_async_generator"):
async for chunk in provider.create_async_generator(model, messages, stream=stream, **kwargs):
Expand Down
Loading

0 comments on commit 3f93d34

Please sign in to comment.