Commit
Merge pull request #267 from parea-ai/PAI-522-enable-instantiated-oai-client-automatic-logging-in-python-sdk

Pai 522 enable instantiated oai client automatic logging in python sdk

joschkabraun committed Jan 8, 2024
2 parents 7a0e9c3 + 81b881a · commit 91422d4
Showing 4 changed files with 11 additions and 6 deletions.
4 changes: 4 additions & 0 deletions parea/client.py
@@ -6,6 +6,7 @@
 
 from attrs import asdict, define, field
 from cattrs import structure
+from openai import OpenAI
 
 from parea.api_client import HTTPClient
 from parea.cache import InMemoryCache, RedisCache
@@ -46,6 +47,9 @@ def __attrs_post_init__(self):
         parea_logger.set_redis_cache(self.cache)
         _init_parea_wrapper(logger_all_possible, self.cache)
 
+    def wrap_openai_client(self, client: OpenAI) -> None:
+        OpenAIWrapper().init(log=logger_all_possible, cache=self.cache, module_client=client)
+
     def completion(self, data: Completion) -> CompletionResponse:
         parent_trace_id = get_current_trace_id()
         inference_id = gen_trace_id()
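Note (not part of the commit): the new wrap_openai_client method hands an instantiated OpenAI client to OpenAIWrapper, which patches that client so its chat-completion calls are logged by Parea automatically. The cookbook change below exercises it end to end; in isolation, the intended call pattern is roughly the following minimal sketch, assuming the usual API-key environment variables are set:

    import os

    from openai import OpenAI

    from parea import Parea

    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
    p = Parea(api_key=os.getenv("PAREA_API_KEY"))
    p.wrap_openai_client(client)  # calls made through `client` are now traced by Parea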
7 changes: 4 additions & 3 deletions parea/cookbook/tracing_with_open_ai_endpoint_directly.py
@@ -1,22 +1,23 @@
 import os
 from datetime import datetime
 
-import openai
 from dotenv import load_dotenv
+from openai import OpenAI
 
 from parea import Parea
 from parea.schemas.models import FeedbackRequest
 from parea.utils.trace_utils import get_current_trace_id, trace
 
 load_dotenv()
 
-openai.api_key = os.getenv("OPENAI_API_KEY")
+client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
 
 p = Parea(api_key=os.getenv("PAREA_API_KEY"))
+p.wrap_openai_client(client)
 
 
 def call_llm(data: list[dict], model: str = "gpt-3.5-turbo", temperature: float = 0.0) -> str:
-    return openai.chat.completions.create(model=model, temperature=temperature, messages=data).choices[0].message.content
+    return client.chat.completions.create(model=model, temperature=temperature, messages=data).choices[0].message.content
 
 
 @trace
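The rest of the cookbook script is untouched by this commit and not shown here. A hypothetical invocation of the updated call_llm helper, purely for illustration (the main function and prompt text below are assumptions, not part of the file):

    @trace
    def main() -> str:
        return call_llm([{"role": "user", "content": "Write a haiku about tracing."}])


    if __name__ == "__main__":
        print(main())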
4 changes: 2 additions & 2 deletions parea/wrapper/openai.py
@@ -122,13 +122,13 @@ class OpenAIWrapper:
     except openai.OpenAIError:
         original_methods = {}
 
-    def init(self, log: Callable, cache: Cache = None):
+    def init(self, log: Callable, cache: Cache = None, module_client=openai):
         Wrapper(
             resolver=self.resolver,
             gen_resolver=self.gen_resolver,
             agen_resolver=self.agen_resolver,
             log=log,
-            module=openai,
+            module=module_client,
             func_names=list(self.original_methods.keys()),
             cache=cache,
             convert_kwargs_to_cache_request=self.convert_kwargs_to_cache_request,
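Since module_client defaults to the openai module, the pre-existing module-level instrumentation is unchanged; the new parameter only adds a second entry point. A sketch of the two call sites this signature now supports, reusing the names from parea/client.py above (the second form is what Parea.wrap_openai_client uses):

    # unchanged default: patch the openai module itself
    OpenAIWrapper().init(log=logger_all_possible, cache=cache)

    # new: patch a specific, already-instantiated OpenAI client
    OpenAIWrapper().init(log=logger_all_possible, cache=cache, module_client=client)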
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "poetry.core.masonry.api"
 [tool.poetry]
 name = "parea-ai"
 packages = [{ include = "parea" }]
-version = "0.2.28"
+version = "0.2.29"
 description = "Parea python sdk"
 readme = "README.md"
 authors = ["joel-parea-ai <[email protected]>"]
