🧵 Add streaming response with async openai client
steffenslavetinsky committed Jan 24, 2024
1 parent 683e65a commit c3a1b19
Showing 1 changed file with 5 additions and 5 deletions.
renumics/spotlight/backend/websockets.py (10 changes: 5 additions & 5 deletions)
@@ -20,7 +20,7 @@
 import pandas as pd
 from fastapi import WebSocket, WebSocketDisconnect
 from loguru import logger
-from openai import OpenAI
+from openai import AsyncOpenAI
 from pydantic import BaseModel
 from typing_extensions import Literal

@@ -29,7 +29,7 @@
 from .tasks import TaskManager, TaskCancelled
 from .tasks.reduction import compute_umap, compute_pca

-openai_client = OpenAI()
+openai_client = AsyncOpenAI()


 class Message(BaseModel):
@@ -405,7 +405,7 @@ async def _(data: ChatData, connection: WebsocketConnection) -> None:
         return

     try:
-        text2sql_completion = openai_client.chat.completions.create(
+        text2sql_completion = await openai_client.chat.completions.create(
             model="gpt-4",
             messages=[
                 {
@@ -450,13 +450,13 @@ async def _(data: ChatData, connection: WebsocketConnection) -> None:
             question=data.message, query_result=df.to_markdown()
         )

-        completion = openai_client.chat.completions.create(
+        completion = await openai_client.chat.completions.create(
             model="gpt-4",
             messages=[{"role": "user", "content": prompt}],
             stream=True,
         )

-        for chunk in completion:
+        async for chunk in completion:
             print(chunk.choices[0].delta)
             content = chunk.choices[0].delta.content
             if content:
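For context, the pattern this change adopts looks roughly like the following: a minimal, self-contained sketch of streaming a chat completion with the async client and forwarding each chunk over a FastAPI WebSocket. Only AsyncOpenAI, the awaited chat.completions.create(..., stream=True) call, and the async for loop mirror the diff above; the endpoint name, prompt handling, and send_text forwarding are illustrative assumptions, not the actual Spotlight handler.

    # Illustrative sketch only, not the Spotlight handler: the endpoint and the
    # WebSocket forwarding are assumptions made for this example.
    from fastapi import FastAPI, WebSocket
    from openai import AsyncOpenAI

    app = FastAPI()
    openai_client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment

    @app.websocket("/chat")
    async def chat(websocket: WebSocket) -> None:
        await websocket.accept()
        prompt = await websocket.receive_text()

        # With stream=True, the awaited create() call returns an async iterator of chunks.
        completion = await openai_client.chat.completions.create(
            model="gpt-4",
            messages=[{"role": "user", "content": prompt}],
            stream=True,
        )

        async for chunk in completion:
            content = chunk.choices[0].delta.content
            if content:
                # Forward each piece of the answer as soon as it arrives.
                await websocket.send_text(content)

Compared to the previous synchronous OpenAI client, awaiting the call and iterating with async for keeps the event loop free while the model responds, which is what makes streaming the answer chunk by chunk over the WebSocket practical.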
