
Commit

removed prefect support
DinisCruz committed Oct 16, 2024
1 parent 528a43c commit 03abda8
Showing 5 changed files with 34 additions and 103 deletions.
2 changes: 1 addition & 1 deletion deploy/docker/osbot-llms/Dockerfile
@@ -1,7 +1,7 @@
 FROM python:3.12-slim
 
 RUN pip install mangum uvicorn httpx openai numpy
-RUN pip install osbot-aws osbot-fast-api osbot-prefect
+RUN pip install osbot-aws osbot-fast-api
 
 COPY --from=public.ecr.aws/awsguru/aws-lambda-adapter:0.8.4 /lambda-adapter /opt/extensions/lambda-adapter

32 changes: 13 additions & 19 deletions osbot_llms/fast_api/routes/Routes__Chat.py
@@ -2,21 +2,18 @@
 import traceback
 
 from fastapi import Request
-from fastapi.params import Header, Body
-from osbot_prefect.flows.Flow_Events__To__Prefect_Server import Flow_Events__To__Prefect_Server
-from osbot_utils.helpers.Random_Guid import Random_Guid
-from osbot_utils.helpers.flows.Flow import Flow
-from osbot_utils.helpers.flows.decorators.flow import flow
-from osbot_utils.helpers.flows.models.Flow__Config import Flow__Config
-from osbot_utils.utils.Dev import pprint
+from fastapi.params import Header
+from osbot_utils.helpers.flows.Flow import Flow
+from osbot_utils.helpers.flows.decorators.flow import flow
+from osbot_utils.helpers.flows.models.Flow_Run__Config import Flow_Run__Config
 from starlette.responses import StreamingResponse
 from osbot_fast_api.api.Fast_API_Routes import Fast_API_Routes
 from osbot_utils.context_managers.capture_duration import capture_duration
 from osbot_llms.OSBot_LLMs__Shared_Objects import osbot_llms__shared_objects
 from osbot_llms.fast_api.routes.Routes__OpenAI import Routes__OpenAI
 from osbot_llms.llms.chats.LLM__Chat_Completion__Resolve_Engine import LLM__Chat_Completion__Resolve_Engine
 from osbot_llms.llms.storage.Chats_Storage__S3_Minio import Chats_Storage__S3_Minio
-from osbot_llms.models.LLMs__Chat_Completion import LLMs__Chat_Completion, SWAGGER_EXAMPLE__LLMs__Chat_Completion
+from osbot_llms.models.LLMs__Chat_Completion import LLMs__Chat_Completion, SWAGGER_EXAMPLE__LLMs__Chat_Completion
 
 ROUTES_PATHS__CONFIG = ['/config/status', '/config/version']
 HEADER_NAME__CHAT_ID = 'osbot-llms-chat-id'
@@ -36,38 +33,35 @@ def execute_llm_request(self, llm_chat_completion):
         return 'no engine'
 
     async def handle_other_llms(self, llm_chat_completion: LLMs__Chat_Completion, request: Request, request_id: str):
-        @flow(flow_config=Flow__Config(log_to_console=True))
+        @flow(flow_config=Flow_Run__Config(log_to_console=True))
         def handle_other_llms__streamer() -> Flow:
             print("in handle_other_llms__streamer")
             print(llm_chat_completion.json())
             return StreamingResponse(self.handle_other_llms__streamer(llm_chat_completion, request, request_id),media_type='text/event-stream"; charset=utf-8')
 
         stream = llm_chat_completion.stream
         if stream:
-            with Flow_Events__To__Prefect_Server():
-                with handle_other_llms__streamer() as _:
-                    _.execute_flow()
-                    return _.flow_return_value
+            with handle_other_llms__streamer() as _:
+                _.execute_flow()
+                return _.flow_return_value
         else:
             return await self.handle_other_llms__no_stream(llm_chat_completion, request, request_id)
 
     async def handle_other_llms__no_stream(self, llm_chat_completion: LLMs__Chat_Completion, request: Request, request_id: str):
-        @flow(flow_config=Flow__Config(log_to_console=True))
+        @flow(flow_config=Flow_Run__Config(log_to_console=True))
         def flow_handle_other_llms__no_stream() -> Flow:
             print("in handle_other_llms__streamer")
             print(llm_chat_completion.json())
             complete_answer = self.execute_llm_request(llm_chat_completion)
             try:
                 #request_headers = {key: value for key, value in request.headers.items()}
                 llm_chat_completion.llm_answer = complete_answer
             except:
                 pass
             return complete_answer
 
-        with Flow_Events__To__Prefect_Server() :
-            with flow_handle_other_llms__no_stream() as _:
-                _.execute_flow()
-                return _.flow_return_value
+        with flow_handle_other_llms__no_stream() as _:
+            _.execute_flow()
+            return _.flow_return_value
 
-
     async def handle_other_llms__streamer(self, llm_chat_completion: LLMs__Chat_Completion, request: Request, request_id: str):
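Aside: the refactor above swaps Flow__Config for Flow_Run__Config and drops the Flow_Events__To__Prefect_Server wrapper, but keeps the flow-execution idiom itself. Below is a minimal sketch of that idiom, using only names visible in this diff (flow, Flow, Flow_Run__Config, execute_flow, flow_return_value); answer_question and its return value are hypothetical stand-ins, and the sketch assumes osbot-utils is installed.

from osbot_utils.helpers.flows.Flow import Flow
from osbot_utils.helpers.flows.decorators.flow import flow
from osbot_utils.helpers.flows.models.Flow_Run__Config import Flow_Run__Config

@flow(flow_config=Flow_Run__Config(log_to_console=True))
def answer_question() -> Flow:            # hypothetical stand-in for an LLM handler
    return 'an answer'                    # value surfaced later via flow_return_value

with answer_question() as _:              # calling the decorated function returns a Flow
    _.execute_flow()                      # run the flow (logs to console per the config)
    result = _.flow_return_value          # read back the wrapped function's return value

As in the handlers above, the wrapped function's result is not returned directly by the call; it is captured by the Flow and read back via flow_return_value.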
69 changes: 20 additions & 49 deletions poetry.lock

Some generated files are not rendered by default.

3 changes: 0 additions & 3 deletions pyproject.toml
@@ -1,5 +1,3 @@
-# poetry install didn't work because of pendulum (which is a dependency of prefect)
-# pip install prefect did work
 [tool.poetry]
 name = "osbot_llms"
 version = "v0.2.12"
@@ -14,7 +12,6 @@ repository = "https://github.com/owasp-sbot/OSBot-LLMs"
 python = "^3.11"
 osbot-aws = "*"
 osbot-fast-api = "*"
-osbot-prefect = "*"
 mangum = "*"
 uvicorn = "*"
 httpx = "*"
31 changes: 0 additions & 31 deletions tests/integration/test__prefect_support.py

This file was deleted.
