
Commit 205be91
new cookbook, set openai.api_type
jalexanderII committed Jun 10, 2024
1 parent 03a4d51 commit 205be91
Showing 3 changed files with 58 additions and 1 deletion.
@@ -0,0 +1,52 @@
import os

from dotenv import load_dotenv
from openai import OpenAI

from parea import Parea, trace
from parea.evals.rag import context_query_relevancy_factory
from parea.schemas import TestCase

load_dotenv()

client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
p = Parea(api_key=os.getenv("PAREA_API_KEY"))
p.wrap_openai_client(client)

context_query_relevancy = context_query_relevancy_factory(model="gpt-4o", context_fields=["context"])


@trace(eval_funcs=[context_query_relevancy])
def run_experiment(question: str, context: str) -> str:
    return (
        client.chat.completions.create(
            model="gpt-4o",
            temperature=0,
            messages=[{"role": "user", "content": f"Answer question using context. Context: {context}. Question: {question}"}],
        )
        .choices[0]
        .message.content
    )


# You can fetch a dataset directly and then modify it to meet your needs before passing it to p.experiment.
def rename_information_to_context(num_samples: int = 3):
    dataset = p.get_collection("Example_Dataset_Name")
    if dataset:
        testcases: list[TestCase] = list(dataset.test_cases.values())
        # Assume the dataset looks like this:
        # [
        #     inputs={"information": "Some long document", "question": "What is X?"}, target="X is Y" ...
        # ]
        return [{"context": case.inputs["information"], "question": case.inputs["question"], "target": case.target} for case in testcases[:num_samples]]
    return []


def main():
    data = rename_information_to_context()
    experiment = p.experiment("My_Experiment_Name", func=run_experiment, data=data)
    experiment.run()


if __name__ == "__main__":
    main()
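
For reference, a minimal sketch (not part of the commit) of the data shape rename_information_to_context hands to p.experiment: the "context" and "question" keys line up with run_experiment's parameters, and "target" is the reference answer stored alongside each test case. The first row mirrors the example in the comment above; the second row is hypothetical.

# Hypothetical output of rename_information_to_context(num_samples=2):
data = [
    {"context": "Some long document", "question": "What is X?", "target": "X is Y"},
    {"context": "Another long document", "question": "What is Z?", "target": "Z is W"},  # hypothetical row
]
# p.experiment("My_Experiment_Name", func=run_experiment, data=data).run()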
parea/evals/utils.py (5 additions, 0 deletions)
@@ -42,9 +42,12 @@ def safe_json_loads(s) -> dict:
def call_openai(
    messages, model, temperature=1.0, max_tokens=None, top_p=1.0, frequency_penalty=0.0, presence_penalty=0.0, response_format=None, n=1, is_azure=False
) -> Union[str, List[str]]:
    openai.api_type = "openai"
    if is_azure:
        from openai.lib.azure import AzureOpenAI

        openai.api_type = "azure"

        completion = AzureOpenAI().chat.completions.create(
            model=model,
            messages=messages,
@@ -94,9 +97,11 @@ def call_openai(


def embed(model, input, is_azure=False) -> List[float]:
    openai.api_type = "openai"
    if is_azure:
        from openai.lib.azure import AzureOpenAI

        openai.api_type = "azure"
        return AzureOpenAI().embeddings.create(model=model, input=input, encoding_format="float").data[0].embedding
    if openai_version.startswith("0."):
        return openai.Embedding.create(model=model, input=input, encoding_format="float").data[0]["embedding"]
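
The point of the openai.api_type changes above is that both helpers now reset the module-level api_type to "openai" by default and only flip it to "azure" when is_azure=True, so OpenAI and Azure calls can coexist in one process. A minimal usage sketch, not part of the commit: it assumes OPENAI_API_KEY is set for the plain client and the standard AzureOpenAI environment variables (AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, OPENAI_API_VERSION) for the Azure path; the deployment name is hypothetical.

from parea.evals.utils import call_openai, embed

# Plain OpenAI call; call_openai resets openai.api_type to "openai" first.
answer = call_openai(messages=[{"role": "user", "content": "Say hi"}], model="gpt-4o")

# Azure call; the helper sets openai.api_type to "azure" and constructs AzureOpenAI()
# from the environment. "my-gpt-4o-deployment" is a hypothetical deployment name.
azure_answer = call_openai(messages=[{"role": "user", "content": "Say hi"}], model="my-gpt-4o-deployment", is_azure=True)

# Same pattern for embeddings.
vector = embed(model="text-embedding-3-small", input="hello world")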
pyproject.toml (1 addition, 1 deletion)
@@ -6,7 +6,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "parea-ai"
packages = [{ include = "parea" }]
version = "0.2.171"
version = "0.2.172"
description = "Parea python sdk"
readme = "README.md"
authors = ["joel-parea-ai <[email protected]>"]
